Diff of the two build logs:

--
--- b1/build.log	2023-05-18 15:57:37.773195267 +0000
+++ b2/build.log	2023-05-18 16:25:39.159682982 +0000
@@ -1,6 +1,6 @@
 I: pbuilder: network access will be disabled during build
-I: Current time: Thu May 18 02:52:35 -12 2023
-I: pbuilder-time-stamp: 1684421555
+I: Current time: Thu Jun 20 12:20:40 +14 2024
+I: pbuilder-time-stamp: 1718835640
 I: Building the build Environment
 I: extracting base tarball [/var/cache/pbuilder/bookworm-reproducible-base.tgz]
 I: copying local configuration
@@ -16,7 +16,7 @@
 I: copying [./bison_3.8.2+dfsg.orig.tar.xz]
 I: copying [./bison_3.8.2+dfsg-1.debian.tar.xz]
 I: Extracting source
-gpgv: Signature made Sat Oct  2 12:23:58 2021 -12
+gpgv: Signature made Sun Oct  3 14:23:58 2021 +14
 gpgv:                using RSA key A63A3F516EA2FCA2A2FB638D01B3A9952AAE4713
 gpgv:                issuer "cklin@debian.org"
 gpgv: Can't check signature: No public key
@@ -29,52 +29,84 @@
 dpkg-source: info: applying 02_parse_h_dependency
 I: using fakeroot in build.
 I: Installing the build-deps
-I: user script /srv/workspace/pbuilder/9306/tmp/hooks/D02_print_environment starting
+I: user script /srv/workspace/pbuilder/4296/tmp/hooks/D01_modify_environment starting
+debug: Running on codethink15-arm64.
+I: Changing host+domainname to test build reproducibility
+I: Adding a custom variable just for the fun of it...
+I: Changing /bin/sh to bash
+'/bin/sh' -> '/bin/bash'
+lrwxrwxrwx 1 root root 9 Jun 20 12:20 /bin/sh -> /bin/bash
+I: Setting pbuilder2's login shell to /bin/bash
+I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other
+I: user script /srv/workspace/pbuilder/4296/tmp/hooks/D01_modify_environment finished
+I: user script /srv/workspace/pbuilder/4296/tmp/hooks/D02_print_environment starting
 I: set
-  BUILDDIR='/build'
-  BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other'
-  BUILDUSERNAME='pbuilder1'
-  BUILD_ARCH='arm64'
-  DEBIAN_FRONTEND='noninteractive'
+  BASH=/bin/sh
+  BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath
+  BASH_ALIASES=()
+  BASH_ARGC=()
+  BASH_ARGV=()
+  BASH_CMDS=()
+  BASH_LINENO=([0]="12" [1]="0")
+  BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:.
+  BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment")
+  BASH_VERSINFO=([0]="5" [1]="2" [2]="15" [3]="1" [4]="release" [5]="aarch64-unknown-linux-gnu")
+  BASH_VERSION='5.2.15(1)-release'
+  BUILDDIR=/build
+  BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other'
+  BUILDUSERNAME=pbuilder2
+  BUILD_ARCH=arm64
+  DEBIAN_FRONTEND=noninteractive
   DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=8 '
-  DISTRIBUTION='bookworm'
-  HOME='/var/lib/jenkins'
-  HOST_ARCH='arm64'
+  DIRSTACK=()
+  DISTRIBUTION=bookworm
+  EUID=0
+  FUNCNAME=([0]="Echo" [1]="main")
+  GROUPS=()
+  HOME=/var/lib/jenkins
+  HOSTNAME=i-capture-the-hostname
+  HOSTTYPE=aarch64
+  HOST_ARCH=arm64
   IFS=' 	
   '
-  LANG='C'
-  LANGUAGE='en_US:en'
-  LC_ALL='C'
-  MAIL='/var/mail/root'
-  OPTIND='1'
-  PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games'
-  PBCURRENTCOMMANDLINEOPERATION='build'
-  PBUILDER_OPERATION='build'
-  PBUILDER_PKGDATADIR='/usr/share/pbuilder'
-  PBUILDER_PKGLIBDIR='/usr/lib/pbuilder'
-  PBUILDER_SYSCONFDIR='/etc'
-  PPID='9306'
-  PS1='# '
-  PS2='> '
+  LANG=C
+  LANGUAGE=nl_BE:nl
+  LC_ALL=C
+  MACHTYPE=aarch64-unknown-linux-gnu
+  MAIL=/var/mail/root
+  OPTERR=1
+  OPTIND=1
+  OSTYPE=linux-gnu
+  PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path
+  PBCURRENTCOMMANDLINEOPERATION=build
+  PBUILDER_OPERATION=build
+  PBUILDER_PKGDATADIR=/usr/share/pbuilder
+  PBUILDER_PKGLIBDIR=/usr/lib/pbuilder
+  PBUILDER_SYSCONFDIR=/etc
+  PIPESTATUS=([0]="0")
+  POSIXLY_CORRECT=y
+  PPID=4296
   PS4='+ '
-  PWD='/'
-  SHELL='/bin/bash'
-  SHLVL='2'
-  SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.921cwt9n/pbuilderrc_Zxhl --distribution bookworm --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/bookworm-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.921cwt9n/b1 --logfile b1/build.log bison_3.8.2+dfsg-1.dsc'
-  SUDO_GID='117'
-  SUDO_UID='110'
-  SUDO_USER='jenkins'
-  TERM='unknown'
-  TZ='/usr/share/zoneinfo/Etc/GMT+12'
-  USER='root'
-  USERNAME='root'
-  _='/usr/bin/systemd-run'
-  http_proxy='http://192.168.101.16:3128'
+  PWD=/
+  SHELL=/bin/bash
+  SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix
+  SHLVL=3
+  SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.921cwt9n/pbuilderrc_Zosg --distribution bookworm --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/bookworm-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.921cwt9n/b2 --logfile b2/build.log --extrapackages usrmerge bison_3.8.2+dfsg-1.dsc'
+  SUDO_GID=117
+  SUDO_UID=110
+  SUDO_USER=jenkins
+  TERM=unknown
+  TZ=/usr/share/zoneinfo/Etc/GMT-14
+  UID=0
+  USER=root
+  USERNAME=root
+  _='I: set'
+  http_proxy=http://192.168.101.16:3128
 I: uname -a
-  Linux codethink12-arm64 4.15.0-211-generic #222-Ubuntu SMP Tue Apr 18 18:58:27 UTC 2023 aarch64 GNU/Linux
+  Linux i-capture-the-hostname 4.15.0-211-generic #222-Ubuntu SMP Tue Apr 18 18:58:27 UTC 2023 aarch64 GNU/Linux
 I: ls -l /bin
-  lrwxrwxrwx 1 root root 7 May 17 22:25 /bin -> usr/bin
-I: user script /srv/workspace/pbuilder/9306/tmp/hooks/D02_print_environment finished
+  lrwxrwxrwx 1 root root 7 Jun 18 06:47 /bin -> usr/bin
+I: user script /srv/workspace/pbuilder/4296/tmp/hooks/D02_print_environment finished
  -> Attempting to satisfy build-dependencies
  -> Creating pbuilder-satisfydepends-dummy package
 Package: pbuilder-satisfydepends-dummy
@@ -151,7 +183,7 @@
 Get: 31 http://deb.debian.org/debian bookworm/main arm64 po-debconf all 1.0.21+nmu1 [248 kB]
 Get: 32 http://deb.debian.org/debian bookworm/main arm64 debhelper all 13.11.4 [942 kB]
 Get: 33 http://deb.debian.org/debian bookworm/main arm64 help2man arm64 1.49.3 [198 kB]
-Fetched 19.0 MB in 4s (4775 kB/s)
+Fetched 19.0 MB in 0s (49.1 MB/s)
 debconf: delaying package configuration, since apt-utils is not installed
 Selecting previously unselected package m4.
 (Reading database ... 
(Reading database ... 5%
(Reading database ... 10%
(Reading database ... 15%
(Reading database ... 20%
(Reading database ... 25%
(Reading database ... 30%
(Reading database ... 35%
(Reading database ... 40%
(Reading database ... 45%
(Reading database ... 50%
(Reading database ... 55%
(Reading database ... 60%
(Reading database ... 65%
(Reading database ... 70%
(Reading database ... 75%
(Reading database ... 80%
(Reading database ... 85%
(Reading database ... 90%
(Reading database ... 95%
(Reading database ... 100%
(Reading database ... 19616 files and directories currently installed.)
@@ -300,10 +332,15 @@
 Reading package lists...
 Building dependency tree...
 Reading state information...
+usrmerge is already the newest version (35).
 fakeroot is already the newest version (1.31-1.2).
 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
 I: Building the package
-I: Running cd /build/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S  > ../bison_3.8.2+dfsg-1_source.changes
+I: user script /srv/workspace/pbuilder/4296/tmp/hooks/A99_set_merged_usr starting
+Re-configuring usrmerge...
+I: user script /srv/workspace/pbuilder/4296/tmp/hooks/A99_set_merged_usr finished
+hostname: Temporary failure in name resolution
+I: Running cd /build/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S  > ../bison_3.8.2+dfsg-1_source.changes
 dpkg-buildpackage: info: source package bison
 dpkg-buildpackage: info: source version 2:3.8.2+dfsg-1
 dpkg-buildpackage: info: source distribution unstable
@@ -913,13 +950,12 @@
 	make -j8
 make[1]: Entering directory '/build/bison-3.8.2+dfsg'
 rm -f examples/c/reccalc/scan.stamp examples/c/reccalc/scan.stamp.tmp
-/usr/bin/mkdir -p examples/c/reccalc
 rm -f lib/alloca.h-t lib/alloca.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
   sed -e 's|@''HAVE_ALLOCA_H''@|1|g' < ./lib/alloca.in.h; \
 } > lib/alloca.h-t && \
 mv -f lib/alloca.h-t lib/alloca.h
-touch examples/c/reccalc/scan.stamp.tmp
+/usr/bin/mkdir -p examples/c/reccalc
 rm -f lib/configmake.h-t && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
   echo '#if HAVE_WINSOCK2_H'; \
@@ -955,7 +991,7 @@
   echo '#define PKGLIBEXECDIR "/usr/libexec/bison"'; \
 } | sed '/""/d' > lib/configmake.h-t && \
 mv -f lib/configmake.h-t lib/configmake.h
-flex   -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l
+touch examples/c/reccalc/scan.stamp.tmp
 rm -f lib/fcntl.h-t lib/fcntl.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -982,6 +1018,7 @@
       < ./lib/fcntl.in.h; \
 } > lib/fcntl.h-t && \
 mv lib/fcntl.h-t lib/fcntl.h
+flex   -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l
 rm -f lib/iconv.h-t lib/iconv.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -1045,7 +1082,6 @@
       < ./lib/limits.in.h; \
 } > lib/limits.h-t && \
 mv lib/limits.h-t lib/limits.h
-mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp
 rm -f lib/locale.h-t lib/locale.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -1075,6 +1111,7 @@
       < ./lib/locale.in.h; \
 } > lib/locale.h-t && \
 mv lib/locale.h-t lib/locale.h
+mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp
 rm -f lib/math.h-t lib/math.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -1918,6 +1955,7 @@
 } > lib/sys/stat.h-t && \
 mv lib/sys/stat.h-t lib/sys/stat.h
 /usr/bin/mkdir -p lib/sys
+/usr/bin/mkdir -p lib/sys
 rm -f lib/sys/time.h-t lib/sys/time.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -1938,8 +1976,6 @@
       < ./lib/sys_time.in.h; \
 } > lib/sys/time.h-t && \
 mv lib/sys/time.h-t lib/sys/time.h
-/usr/bin/mkdir -p lib/sys
-/usr/bin/mkdir -p lib/sys
 rm -f lib/sys/times.h-t lib/sys/times.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -1957,6 +1993,7 @@
 } > lib/sys/times.h-t && \
 mv lib/sys/times.h-t lib/sys/times.h
 /usr/bin/mkdir -p lib/sys
+/usr/bin/mkdir -p lib/sys
 rm -f lib/sys/types.h-t lib/sys/types.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -1969,19 +2006,6 @@
       < ./lib/sys_types.in.h; \
 } > lib/sys/types.h-t && \
 mv lib/sys/types.h-t lib/sys/types.h
-rm -f lib/sys/wait.h-t lib/sys/wait.h && \
-{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
-  sed -e 's|@''GUARD_PREFIX''@|GL|g' \
-      -e 's|@''INCLUDE_NEXT''@|include_next|g' \
-      -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \
-      -e 's|@''PRAGMA_COLUMNS''@||g' \
-      -e 's|@''NEXT_SYS_WAIT_H''@|<sys/wait.h>|g' \
-      -e 's/@''GNULIB_WAITPID''@/1/g' \
-      -e '/definitions of _GL_FUNCDECL_RPL/r ./lib/c++defs.h' \
-      -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h' \
-      < ./lib/sys_wait.in.h; \
-} > lib/sys/wait.h-t && \
-mv lib/sys/wait.h-t lib/sys/wait.h
 rm -f lib/termios.h-t lib/termios.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -1997,6 +2021,19 @@
     < ./lib/termios.in.h; \
 } > lib/termios.h-t && \
 mv lib/termios.h-t lib/termios.h
+rm -f lib/sys/wait.h-t lib/sys/wait.h && \
+{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \
+  sed -e 's|@''GUARD_PREFIX''@|GL|g' \
+      -e 's|@''INCLUDE_NEXT''@|include_next|g' \
+      -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \
+      -e 's|@''PRAGMA_COLUMNS''@||g' \
+      -e 's|@''NEXT_SYS_WAIT_H''@|<sys/wait.h>|g' \
+      -e 's/@''GNULIB_WAITPID''@/1/g' \
+      -e '/definitions of _GL_FUNCDECL_RPL/r ./lib/c++defs.h' \
+      -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h' \
+      < ./lib/sys_wait.in.h; \
+} > lib/sys/wait.h-t && \
+mv lib/sys/wait.h-t lib/sys/wait.h
 rm -f lib/time.h-t lib/time.h && \
 { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \
   sed -e 's|@''GUARD_PREFIX''@|GL|g' \
@@ -2416,66 +2453,6 @@
 make[3]: Leaving directory '/build/bison-3.8.2+dfsg/gnulib-po'
 Making all in .
 make[3]: Entering directory '/build/bison-3.8.2+dfsg'
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-bitsetv.o `test -f 'lib/bitsetv.c' || echo './'`lib/bitsetv.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-ctype.o `test -f 'lib/c-ctype.c' || echo './'`lib/c-ctype.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-strcasecmp.o `test -f 'lib/c-strcasecmp.c' || echo './'`lib/c-strcasecmp.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-strncasecmp.o `test -f 'lib/c-strncasecmp.c' || echo './'`lib/c-strncasecmp.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-canonicalize.o `test -f 'lib/canonicalize.c' || echo './'`lib/canonicalize.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-careadlinkat.o `test -f 'lib/careadlinkat.c' || echo './'`lib/careadlinkat.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-cloexec.o `test -f 'lib/cloexec.c' || echo './'`lib/cloexec.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-close-stream.o `test -f 'lib/close-stream.c' || echo './'`lib/close-stream.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-closeout.o `test -f 'lib/closeout.c' || echo './'`lib/closeout.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-concat-filename.o `test -f 'lib/concat-filename.c' || echo './'`lib/concat-filename.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname.o `test -f 'lib/dirname.c' || echo './'`lib/dirname.c
-lib/careadlinkat.c: In function 'careadlinkat':
-lib/careadlinkat.c:178:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp]
-  178 |    #warning "GCC might issue a bogus -Wreturn-local-addr warning here."
-      |     ^~~~~~~
-lib/careadlinkat.c:179:5: warning: #warning "See <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93644>." [-Wcpp]
-  179 |    #warning "See <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93644>."
-      |     ^~~~~~~
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-basename.o `test -f 'lib/basename.c' || echo './'`lib/basename.c
-lib/careadlinkat.c:182:10: warning: function may return address of local variable [-Wreturn-local-addr]
-  182 |   return readlink_stk (fd, filename, buffer, buffer_size, alloc,
-      |          ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-  183 |                        preadlinkat, stack_buf);
-      |                        ~~~~~~~~~~~~~~~~~~~~~~~
-lib/careadlinkat.c:181:8: note: declared here
-  181 |   char stack_buf[STACK_BUF_SIZE];
-      |        ^~~~~~~~~
-lib/canonicalize.c: In function 'canonicalize_filename_mode':
-lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp]
-  484 |    #warning "GCC might issue a bogus -Wreturn-local-addr warning here."
-      |     ^~~~~~~
-lib/canonicalize.c:485:5: warning: #warning "See <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93644>." [-Wcpp]
-  485 |    #warning "See <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93644>."
-      |     ^~~~~~~
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname-lgpl.o `test -f 'lib/dirname-lgpl.c' || echo './'`lib/dirname-lgpl.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-stripslash.o `test -f 'lib/stripslash.c' || echo './'`lib/stripslash.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-execute.o `test -f 'lib/execute.c' || echo './'`lib/execute.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-exitfail.o `test -f 'lib/exitfail.c' || echo './'`lib/exitfail.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fatal-signal.o `test -f 'lib/fatal-signal.c' || echo './'`lib/fatal-signal.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fd-safer-flag.o `test -f 'lib/fd-safer-flag.c' || echo './'`lib/fd-safer-flag.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dup-safer-flag.o `test -f 'lib/dup-safer-flag.c' || echo './'`lib/dup-safer-flag.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-file-set.o `test -f 'lib/file-set.c' || echo './'`lib/file-set.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-findprog-in.o `test -f 'lib/findprog-in.c' || echo './'`lib/findprog-in.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fopen-safer.o `test -f 'lib/fopen-safer.c' || echo './'`lib/fopen-safer.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fstrcmp.o `test -f 'lib/fstrcmp.c' || echo './'`lib/fstrcmp.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gethrxtime.o `test -f 'lib/gethrxtime.c' || echo './'`lib/gethrxtime.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-xtime.o `test -f 'lib/xtime.c' || echo './'`lib/xtime.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-getprogname.o `test -f 'lib/getprogname.c' || echo './'`lib/getprogname.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gettime.o `test -f 'lib/gettime.c' || echo './'`lib/gettime.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hard-locale.o `test -f 'lib/hard-locale.c' || echo './'`lib/hard-locale.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash.o `test -f 'lib/hash.c' || echo './'`lib/hash.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_hash_map.o `test -f 'lib/gl_hash_map.c' || echo './'`lib/gl_hash_map.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash-pjw.o `test -f 'lib/hash-pjw.c' || echo './'`lib/hash-pjw.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash-triple-simple.o `test -f 'lib/hash-triple-simple.c' || echo './'`lib/hash-triple-simple.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-ialloc.o `test -f 'lib/ialloc.c' || echo './'`lib/ialloc.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-integer_length.o `test -f 'lib/integer_length.c' || echo './'`lib/integer_length.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-integer_length_l.o `test -f 'lib/integer_length_l.c' || echo './'`lib/integer_length_l.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_linked_list.o `test -f 'lib/gl_linked_list.c' || echo './'`lib/gl_linked_list.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_list.o `test -f 'lib/gl_list.c' || echo './'`lib/gl_list.c
-gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-localcharset.o `test -f 'lib/localcharset.c' || echo './'`lib/localcharset.c
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_map.o `test -f 'lib/gl_map.c' || echo './'`lib/gl_map.c
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-math.o `test -f 'lib/math.c' || echo './'`lib/math.c
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbchar.o `test -f 'lib/mbchar.c' || echo './'`lib/mbchar.c
@@ -2594,6 +2571,66 @@
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/bitset/libbison_a-table.o `test -f 'lib/bitset/table.c' || echo './'`lib/bitset/table.c
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/bitset/libbison_a-list.o `test -f 'lib/bitset/list.c' || echo './'`lib/bitset/list.c
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/bitset/libbison_a-vector.o `test -f 'lib/bitset/vector.c' || echo './'`lib/bitset/vector.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-bitsetv.o `test -f 'lib/bitsetv.c' || echo './'`lib/bitsetv.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-ctype.o `test -f 'lib/c-ctype.c' || echo './'`lib/c-ctype.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-strcasecmp.o `test -f 'lib/c-strcasecmp.c' || echo './'`lib/c-strcasecmp.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-strncasecmp.o `test -f 'lib/c-strncasecmp.c' || echo './'`lib/c-strncasecmp.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-canonicalize.o `test -f 'lib/canonicalize.c' || echo './'`lib/canonicalize.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-careadlinkat.o `test -f 'lib/careadlinkat.c' || echo './'`lib/careadlinkat.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-cloexec.o `test -f 'lib/cloexec.c' || echo './'`lib/cloexec.c
+lib/careadlinkat.c: In function 'careadlinkat':
+lib/careadlinkat.c:178:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp]
+  178 |    #warning "GCC might issue a bogus -Wreturn-local-addr warning here."
+      |     ^~~~~~~
+lib/careadlinkat.c:179:5: warning: #warning "See <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93644>." [-Wcpp]
+  179 |    #warning "See <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93644>."
+      |     ^~~~~~~
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-close-stream.o `test -f 'lib/close-stream.c' || echo './'`lib/close-stream.c
+lib/careadlinkat.c:182:10: warning: function may return address of local variable [-Wreturn-local-addr]
+  182 |   return readlink_stk (fd, filename, buffer, buffer_size, alloc,
+      |          ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+  183 |                        preadlinkat, stack_buf);
+      |                        ~~~~~~~~~~~~~~~~~~~~~~~
+lib/careadlinkat.c:181:8: note: declared here
+  181 |   char stack_buf[STACK_BUF_SIZE];
+      |        ^~~~~~~~~
+lib/canonicalize.c: In function 'canonicalize_filename_mode':
+lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp]
+  484 |    #warning "GCC might issue a bogus -Wreturn-local-addr warning here."
+      |     ^~~~~~~
+lib/canonicalize.c:485:5: warning: #warning "See <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93644>." [-Wcpp]
+  485 |    #warning "See <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93644>."
+      |     ^~~~~~~
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-closeout.o `test -f 'lib/closeout.c' || echo './'`lib/closeout.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-concat-filename.o `test -f 'lib/concat-filename.c' || echo './'`lib/concat-filename.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname.o `test -f 'lib/dirname.c' || echo './'`lib/dirname.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-basename.o `test -f 'lib/basename.c' || echo './'`lib/basename.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname-lgpl.o `test -f 'lib/dirname-lgpl.c' || echo './'`lib/dirname-lgpl.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-stripslash.o `test -f 'lib/stripslash.c' || echo './'`lib/stripslash.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-execute.o `test -f 'lib/execute.c' || echo './'`lib/execute.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-exitfail.o `test -f 'lib/exitfail.c' || echo './'`lib/exitfail.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fatal-signal.o `test -f 'lib/fatal-signal.c' || echo './'`lib/fatal-signal.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fd-safer-flag.o `test -f 'lib/fd-safer-flag.c' || echo './'`lib/fd-safer-flag.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dup-safer-flag.o `test -f 'lib/dup-safer-flag.c' || echo './'`lib/dup-safer-flag.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-file-set.o `test -f 'lib/file-set.c' || echo './'`lib/file-set.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-findprog-in.o `test -f 'lib/findprog-in.c' || echo './'`lib/findprog-in.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fopen-safer.o `test -f 'lib/fopen-safer.c' || echo './'`lib/fopen-safer.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fstrcmp.o `test -f 'lib/fstrcmp.c' || echo './'`lib/fstrcmp.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gethrxtime.o `test -f 'lib/gethrxtime.c' || echo './'`lib/gethrxtime.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-xtime.o `test -f 'lib/xtime.c' || echo './'`lib/xtime.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-getprogname.o `test -f 'lib/getprogname.c' || echo './'`lib/getprogname.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gettime.o `test -f 'lib/gettime.c' || echo './'`lib/gettime.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hard-locale.o `test -f 'lib/hard-locale.c' || echo './'`lib/hard-locale.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash.o `test -f 'lib/hash.c' || echo './'`lib/hash.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_hash_map.o `test -f 'lib/gl_hash_map.c' || echo './'`lib/gl_hash_map.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash-pjw.o `test -f 'lib/hash-pjw.c' || echo './'`lib/hash-pjw.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash-triple-simple.o `test -f 'lib/hash-triple-simple.c' || echo './'`lib/hash-triple-simple.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-ialloc.o `test -f 'lib/ialloc.c' || echo './'`lib/ialloc.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-integer_length.o `test -f 'lib/integer_length.c' || echo './'`lib/integer_length.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-integer_length_l.o `test -f 'lib/integer_length_l.c' || echo './'`lib/integer_length_l.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_linked_list.o `test -f 'lib/gl_linked_list.c' || echo './'`lib/gl_linked_list.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_list.o `test -f 'lib/gl_list.c' || echo './'`lib/gl_list.c
+gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-localcharset.o `test -f 'lib/localcharset.c' || echo './'`lib/localcharset.c
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/glthread/libbison_a-lock.o `test -f 'lib/glthread/lock.c' || echo './'`lib/glthread/lock.c
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_dupfree.o `test -f 'lib/malloc/scratch_buffer_dupfree.c' || echo './'`lib/malloc/scratch_buffer_dupfree.c
 gcc -DEXEEXT=\"\"   -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_grow.o `test -f 'lib/malloc/scratch_buffer_grow.c' || echo './'`lib/malloc/scratch_buffer_grow.c
@@ -2616,8 +2653,8 @@
   sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \
       -e '/translation bugs/d'  >>doc/bison.help.tmp
 ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help
-if /bin/bash '/build/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then	\
-  /bin/bash '/build/bison-3.8.2+dfsg/build-aux/missing' help2man							\
+if /bin/sh '/build/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then	\
+  /bin/sh '/build/bison-3.8.2+dfsg/build-aux/missing' help2man							\
     --include=./doc/bison.x				\
     --output=doc/bison.1.tmp tests/bison &&				\
   { sed 's/^\(\.TH[^"]*"[^"]*"[^"]*\)"[^"]*"/\1/' doc/bison.1     >doc/bison.1a.tmp || true; } &&		\
@@ -2682,61 +2719,61 @@
 ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help
 make   examples/c/calc/calc examples/c/glr/c++-types examples/c/lexcalc/lexcalc examples/c/mfcalc/mfcalc examples/c/pushcalc/calc examples/c/reccalc/reccalc examples/c/rpcalc/rpcalc examples/c++/calc++/calc++ examples/c++/glr/c++-types examples/c++/simple examples/c++/variant examples/c++/variant-11   ./tests/bison tests/atconfig tests/atlocal
 make[4]: Entering directory '/build/bison-3.8.2+dfsg'
-/bin/bash ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
-/bin/bash ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
-/bin/bash ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
 \
-/bin/bash ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex  
-/bin/bash ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
-/bin/bash ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
-/bin/bash ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
-/bin/bash ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex  
+/bin/sh ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
 rm -f examples/c++/calc++/parser.stamp
 touch examples/c++/calc++/parser.stamp.tmp
 ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines  -o examples/c++/calc++/parser.cc examples/c++/calc++/parser.yy
+updating examples/c/pushcalc/calc.output
 updating examples/c/calc/calc.output
-updating examples/c/calc/calc.h
-\
-/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex  
 updating examples/c/lexcalc/parse.output
-updating examples/c/glr/c++-types.output
-updating examples/c/rpcalc/rpcalc.output
+updating examples/c/pushcalc/calc.h
+updating examples/c/mfcalc/mfcalc.output
+\
+/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex  
 updating examples/c/lexcalc/parse.h
+updating examples/c/glr/c++-types.output
+updating examples/c/calc/calc.h
 rm -f examples/c++/glr/c++-types.stamp
 touch examples/c++/glr/c++-types.stamp.tmp
-updating examples/c/glr/c++-types.h
 ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines  -o examples/c++/glr/c++-types.cc examples/c++/glr/c++-types.yy
-updating examples/c/rpcalc/rpcalc.h
-updating examples/c/mfcalc/mfcalc.output
-updating examples/c/pushcalc/calc.output
 \
-/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
-updating examples/c/reccalc/parse.output
-\
-/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
 updating examples/c/mfcalc/mfcalc.h
 mv -f examples/c++/calc++/parser.stamp.tmp examples/c++/calc++/parser.stamp
+updating examples/c/glr/c++-types.h
+\
+/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
 \
-/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
+/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines 
 make[4]: 'tests/bison' is up to date.
 make[4]: Nothing to be done for 'tests/atconfig'.
 make[4]: 'tests/atlocal' is up to date.
-updating examples/c/pushcalc/calc.h
+updating examples/c/reccalc/parse.output
+updating examples/c/rpcalc/rpcalc.output
+updating examples/c/rpcalc/rpcalc.h
+updating examples/c/reccalc/parse.h
 gcc -DEXEEXT=\"\"   -I./examples/c/calc -I./examples/c/calc -Wdate-time -D_FORTIFY_SOURCE=2    -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/calc/examples_c_calc_calc-calc.o `test -f 'examples/c/calc/calc.c' || echo './'`examples/c/calc/calc.c
 gcc -DEXEEXT=\"\"   -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2    -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c
 gcc -DEXEEXT=\"\"   -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o `test -f 'examples/c/lexcalc/parse.c' || echo './'`examples/c/lexcalc/parse.c
-updating examples/c/reccalc/parse.h
 gcc -DEXEEXT=\"\"   -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o `test -f 'examples/c/lexcalc/scan.c' || echo './'`examples/c/lexcalc/scan.c
 mv -f examples/c++/glr/c++-types.stamp.tmp examples/c++/glr/c++-types.stamp
 gcc -DEXEEXT=\"\"   -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2    -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c
+updating examples/c++/variant-11.output
 updating examples/c++/simple.output
-updating examples/c++/simple.hh
+updating examples/c++/variant-11.hh
 gcc -DEXEEXT=\"\"   -I./examples/c/pushcalc -I./examples/c/pushcalc -Wdate-time -D_FORTIFY_SOURCE=2    -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/pushcalc/examples_c_pushcalc_calc-calc.o `test -f 'examples/c/pushcalc/calc.c' || echo './'`examples/c/pushcalc/calc.c
-updating examples/c++/variant-11.output
+updating examples/c++/simple.hh
+gcc -DEXEEXT=\"\"   -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c
 updating examples/c++/variant.output
 updating examples/c++/variant.hh
-updating examples/c++/variant-11.hh
-gcc -DEXEEXT=\"\"   -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c
 gcc -DEXEEXT=\"\"   -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o `test -f 'examples/c/reccalc/scan.c' || echo './'`examples/c/reccalc/scan.c
 gcc -DEXEEXT=\"\"   -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2    -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c
 g++ -DEXEEXT=\"\"   -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11  -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/simple-simple.o `test -f 'examples/c++/simple.cc' || echo './'`examples/c++/simple.cc
@@ -2772,10 +2809,10 @@
   echo 'm4_define([AT_PACKAGE_STRING],    [GNU Bison 3.8.2])'; \
   echo 'm4_define([AT_PACKAGE_BUGREPORT], [bug-bison@gnu.org])'; \
 } >tests/package.m4.tmp
-make[5]: Entering directory '/build/bison-3.8.2+dfsg'
 mv tests/package.m4.tmp tests/package.m4
 \
-  /bin/bash '/build/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp
+  /bin/sh '/build/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp
+make[5]: Entering directory '/build/bison-3.8.2+dfsg'
 make[5]: Entering directory '/build/bison-3.8.2+dfsg'
 Making all in po
 make[6]: Entering directory '/build/bison-3.8.2+dfsg/po'
@@ -2793,23 +2830,23 @@
 make[6]: Entering directory '/build/bison-3.8.2+dfsg'
 PASS: examples/c/glr/c++-types.test
 PASS: examples/c/mfcalc/mfcalc.test
-PASS: examples/c/lexcalc/lexcalc.test
+/usr/bin/mkdir -p doc
 PASS: examples/c/pushcalc/calc.test
 PASS: examples/c/calc/calc.test
-/usr/bin/mkdir -p doc
 PASS: examples/c/rpcalc/rpcalc.test
+PASS: examples/c/lexcalc/lexcalc.test
 LC_ALL=C tests/bison --version >doc/bison.help.tmp
 LC_ALL=C tests/bison --help | \
   sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \
       -e '/translation bugs/d'  >>doc/bison.help.tmp
 ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help
-PASS: examples/c++/variant.test
 make[6]: Leaving directory '/build/bison-3.8.2+dfsg'
-PASS: examples/c++/simple.test
 make[5]: Leaving directory '/build/bison-3.8.2+dfsg'
-PASS: examples/c++/glr/c++-types.test
+PASS: examples/c++/simple.test
+PASS: examples/c++/variant.test
 PASS: examples/c++/variant-11.test
 PASS: examples/c/reccalc/reccalc.test
+PASS: examples/c++/glr/c++-types.test
 PASS: examples/c++/calc++/calc++.test
 ============================================================================
 Testsuite summary for GNU Bison 3.8.2
@@ -2825,7 +2862,7 @@
 make[5]: Leaving directory '/build/bison-3.8.2+dfsg'
 "/usr/bin/perl" -pi -e 's/\@tb\@/\t/g' tests/testsuite.tmp
 mv tests/testsuite.tmp tests/testsuite
-/bin/bash ./tests/testsuite -C tests -j8 --verbose
+/bin/sh ./tests/testsuite -C tests -j8 --verbose
 ## --------------------------- ##
 ## GNU Bison 3.8.2 test suite. ##
 ## --------------------------- ##
@@ -2837,23 +2874,24 @@
 
 
 
-1. m4.at:21: testing Generating Comments ...
-7. input.at:204: testing Yacc warnings ...
-./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wyacc input.y
 2. input.at:27: testing Invalid number of arguments ...
 ./input.at:29: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 
-4. input.at:83: testing Invalid inputs ...
 5. input.at:147: testing Invalid inputs with {} ...
-./input.at:97: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' input.y || exit 77
 3. input.at:58: testing Invalid options ...
 ./input.at:67: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -ferror=caret input.y
-./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -S ./input.m4 input.y
+1. m4.at:21: testing Generating Comments ...
+4. input.at:83: testing Invalid inputs ...
+./input.at:97: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' input.y || exit 77
 8. input.at:238: testing Yacc's %type ...
-./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wyacc input.y
 ./input.at:162: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
 6. input.at:173: testing Yacc warnings on symbols ...
 ./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wyacc input.y
+7. input.at:204: testing Yacc warnings ...
+./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wyacc input.y
+./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wyacc input.y
+./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -S ./input.m4 input.y
+./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 1.y 2.y
 stderr:
 bison: invalid argument 'error=caret' for '--feature'
 Valid arguments are:
@@ -2863,7 +2901,6 @@
   - 'syntax-only'
   - 'all'
 ./input.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --report=error=itemsets input.y
-./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 1.y 2.y
 stderr:
 5. input.at:147:  ok
 input.y:1.11: error: invalid null character
@@ -2900,6 +2937,7 @@
    10 | %{
       | ^~
 ./input.at:104: "$PERL" -p -e 's{([\0\200\210\360\377])}{sprintf "\\x%02x", ord($1)}ge' stderr
+./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --skeleton
 stderr:
 bison: invalid argument 'error=itemsets' for '--report'
 Valid arguments are:
@@ -2911,18 +2949,15 @@
   - 'counterexamples', 'cex'
   - 'all'
 ./input.at:72: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror?all input.y
-./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --skeleton
-4. input.at:83:  ok
-./m4.at:55: cat output.txt
 stderr:
-stderr:
-
+4. input.at:83:  ok
 bison: option '--skeleton' requires an argument
 Try 'bison --help' for more information.
+
 ./input.at:43: sed -e \
   "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \
   stderr
-1. m4.at:21:  ok
+stderr:
 bison: invalid argument 'error?all' for '--warning'
 Valid arguments are:
   - 'all'
@@ -2938,33 +2973,39 @@
   - 'other'
   - 'precedence'
   - 'yacc'
+./m4.at:55: cat output.txt
 3. input.at:58:  ok
 2. input.at:27:  ok
 9. input.at:287: testing Invalid symbol declarations ...
-
+1. m4.at:21:  ok
 ./input.at:304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
 
-./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror
-./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror
 
-9. input.at:287:  ok
 
+9. input.at:287:  ok
 10. input.at:341: testing Redefining the error token ...
 ./input.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror
+
 11. input.at:401: testing Dangling aliases ...
 ./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wdangling input.y
-./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-
-13. input.at:528: testing Invalid $n and @n ...
 12. input.at:427: testing Symbol declarations ...
 ./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y
+./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror
+13. input.at:528: testing Invalid $n and @n ...
+./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+
+./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror
+./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror
+13. input.at:528:  ok
 14. input.at:552: testing Type Clashes ...
 ./input.at:565: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
 14. input.at:552:  ok
+
+
+15. input.at:774: testing Unused values ...
+./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
 stderr:
-13. input.at:528:  ok
 input.y:1.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc]
     1 | %nterm exp
       | ^~~~~~
@@ -2978,31 +3019,13 @@
     4 | exp: "number";
       |      ^~~~~~~~
 ./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror
-./input.at:182: sed 's,.*/$,,' stderr 1>&2
-
-stderr:
-input.y:1.1-11: error: POSIX Yacc does not support %destructor [-Werror=yacc]
-    1 | %destructor {} <int>
-      | ^~~~~~~~~~~
-input.y:2.1-8: error: POSIX Yacc does not support %printer [-Werror=yacc]
-    2 | %printer {} <int>
-      | ^~~~~~~~
-input.y:6.9-20: error: POSIX Yacc does not support typed midrule actions [-Werror=yacc]
-    6 | a: <int>{ $$ = 42; } { $$ = $1; };
-      |         ^~~~~~~~~~~~
-input.y:7.4-9: error: POSIX Yacc does not support %empty [-Werror=yacc]
-    7 | b: %empty            { $$ = 42; };
-      |    ^~~~~~
-
-./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error
 ./input.at:390: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./input.at:216: sed 's,.*/$,,' stderr 1>&2
 16. input.at:784: testing Unused values before symbol declarations ...
-15. input.at:774: testing Unused values ...
-./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
 ./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error
-stderr:
+./input.at:468: cat symbols.csv
+./input.at:182: sed 's,.*/$,,' stderr 1>&2
+12. input.at:427:  ok
+./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error
 stderr:
 input.y:2.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc]
     2 | %nterm nterm1
@@ -3028,33 +3051,41 @@
 input.y:10.9-16: error: POSIX Yacc does not support string literals [-Werror=yacc]
    10 | nterm3: "TOKEN3"
       |         ^~~~~~~~
+stderr:
+input.y:1.1-11: error: POSIX Yacc does not support %destructor [-Werror=yacc]
+    1 | %destructor {} <int>
+      | ^~~~~~~~~~~
+input.y:2.1-8: error: POSIX Yacc does not support %printer [-Werror=yacc]
+    2 | %printer {} <int>
+      | ^~~~~~~~
+input.y:6.9-20: error: POSIX Yacc does not support typed midrule actions [-Werror=yacc]
+    6 | a: <int>{ $$ = 42; } { $$ = $1; };
+      |         ^~~~~~~~~~~~
+input.y:7.4-9: error: POSIX Yacc does not support %empty [-Werror=yacc]
+    7 | b: %empty            { $$ = 42; };
+      |    ^~~~~~
+
+./input.at:253: sed 's,.*/$,,' stderr 1>&2
+./input.at:216: sed 's,.*/$,,' stderr 1>&2
+./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error
+17. input.at:794: testing Symbol redeclared ...
+./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error
+stderr:
 input.y:2.13-17: error: string literal "bar" not attached to a symbol [-Werror=dangling-alias]
     2 | %type <val> "bar"
       |             ^~~~~
 input.y:4.19-23: error: string literal "baz" not attached to a symbol [-Werror=dangling-alias]
     4 | expr: "foo" "bar" "baz"
       |                   ^~~~~
-./input.at:468: cat symbols.csv
-./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none
-12. input.at:427:  ok
-./input.at:253: sed 's,.*/$,,' stderr 1>&2
 ./input.at:410: sed 's,.*/$,,' stderr 1>&2
-./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error
 ./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error
-
-./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none
-17. input.at:794: testing Symbol redeclared ...
-./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none
+./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none
+./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
 ./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none
+./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none
+./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none
 ./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none
-6. input.at:173:  ok
-./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
-
-./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none
-18. input.at:832: testing EOF redeclared ...
-./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none
 stderr:
 input.y:1.12-14: error: symbol FOO redeclared [-Werror=other]
     1 | %token FOO FOO
@@ -3074,64 +3105,74 @@
 input.y:3.8-10: note: previous declaration
     3 | %token EOF 0 EOF 0
       |        ^~~
-7. input.at:204:  ok
-./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none
 ./input.at:804: sed 's,.*/$,,' stderr 1>&2
+./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none
 ./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
+./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none
+6. input.at:173:  ok
+./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none
+
+./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+18. input.at:832: testing EOF redeclared ...
+./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
+8. input.at:238:  ok
+7. input.at:204:  ok
 
-./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
-19. input.at:859: testing Symbol class redefinition ...
 11. input.at:401:  ok
-./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+stderr:
+stdout:
+./input.at:391:  $PREPARSER ./input
 
-19. input.at:859:  ok
-./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+19. input.at:859: testing Symbol class redefinition ...
+./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
 stderr:
-input.y:1.16-18: error: symbol FOO redeclared [-Werror=other]
-    1 | %token FOO BAR FOO 0
-      |                ^~~
-input.y:1.8-10: note: previous declaration
-    1 | %token FOO BAR FOO 0
-      |        ^~~
+./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 
-./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
-20. input.at:899: testing Default %printer and %destructor redeclared ...
+10. input.at:341:  ok
+19. input.at:859: 20. input.at:899: testing Default %printer and %destructor redeclared ...
 ./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-8. input.at:238:  ok
-./input.at:843: sed 's,.*/$,,' stderr 1>&2
+ ok
 21. input.at:970: testing Per-type %printer and %destructor redeclared ...
-./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
-./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
 ./input.at:987: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-21. input.at:970:  ok
 
-20. input.at:899:  ok
+./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
 
+21. input.at:970:  ok
+./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 22. input.at:1013: testing Undefined symbols ...
 ./input.at:1023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-22. input.at:1013:  ok
 23. input.at:1045: testing Unassociated types used for a printer or destructor ...
-./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+20. input.at:899: ./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+ ok
 
-./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
-./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
-stderr:
-stdout:
-./input.at:391:  $PREPARSER ./input
+22. input.at:1013:  ok
 
-stderr:
-./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 24. input.at:1074: testing Useless printers or destructors ...
-10. input.at:341:  ok
+stderr:
 ./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+
+input.y:1.16-18: error: symbol FOO redeclared [-Werror=other]
+    1 | %token FOO BAR FOO 0
+      |                ^~~
+input.y:1.8-10: note: previous declaration
+    1 | %token FOO BAR FOO 0
+      |        ^~~
 25. input.at:1139: testing Unused values with default %destructor ...
 ./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:843: sed 's,.*/$,,' stderr 1>&2
+./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
+26. input.at:1187: testing Unused values with per-type %destructor ...
+./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
 17. input.at:794:  ok
 
 ./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
+27. input.at:1219: testing Duplicate string ...
+./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
+./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
 stderr:
-26. input.at:1187: testing Unused values with per-type %destructor ...
 input.y:12.10-32: error: unset value: $$ [-Werror=other]
    12 | a: INT | INT { } INT { } INT { };
       |          ^~~~~~~~~~~~~~~~~~~~~~~
@@ -3237,21 +3278,10 @@
 input.y:26.40-42: error: unset value: $$ [-Werror=other]
    26 | o: INT | INT <integer>{ } INT <integer>{ } INT { $$ = $1 + $2 + $3 + $4 + $5; };
       |                                        ^~~
-./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
-./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
 ./input.at:775: sed 's,.*/$,,' stderr 1>&2
-
-./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
 stderr:
-27. input.at:1219: testing Duplicate string ...
-input.y:4.22-28: error: type <type3> is used, but is not associated to any symbol [-Werror=other]
-input.y:5.25-31: error: type <type4> is used, but is not associated to any symbol [-Werror=other]
-./input.at:1062: sed 's,.*/$,,' stderr 1>&2
-./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
-./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
-./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
-stderr:
-18. input.at:832:  ok
 input.y:12.10-32: error: unset value: $$ [-Werror=other]
    12 | a: INT | INT { } INT { } INT { };
       |          ^~~~~~~~~~~~~~~~~~~~~~~
@@ -3358,14 +3388,26 @@
    26 | o: INT | INT <integer>{ } INT <integer>{ } INT { $$ = $1 + $2 + $3 + $4 + $5; };
       |                                        ^~~
 ./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
-./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
 ./input.at:785: sed 's,.*/$,,' stderr 1>&2
-
+stderr:
 ./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
+input.y:4.22-28: error: type <type3> is used, but is not associated to any symbol [-Werror=other]
+input.y:5.25-31: error: type <type4> is used, but is not associated to any symbol [-Werror=other]
+./input.at:1062: sed 's,.*/$,,' stderr 1>&2
 ./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -o input.c input.y -Werror
-28. input.at:1247: testing Token collisions ...
-./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
 stderr:
+input.y:6.8-45: error: unset value: $$ [-Werror=other]
+    6 | start: end end tagged tagged { $<tag>1; $3; } ;
+      |        ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+input.y:6.12-14: error: unused value: $2 [-Werror=other]
+    6 | start: end end tagged tagged { $<tag>1; $3; } ;
+      |            ^~~
+input.y:7.6-8: error: unset value: $$ [-Werror=other]
+    7 | end: { } ;
+      |      ^~~
+./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 stderr:
 input.y:6.8-22: error: unset value: $$ [-Werror=other]
     6 | start: end end { $1; } ;
@@ -3376,129 +3418,115 @@
 input.y:7.6-8: error: unset value: $$ [-Werror=other]
     7 | end: { }  ;
       |      ^~~
-input.y:6.8-45: error: unset value: $$ [-Werror=other]
-    6 | start: end end tagged tagged { $<tag>1; $3; } ;
-      |        ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-input.y:6.12-14: error: unused value: $2 [-Werror=other]
-    6 | start: end end tagged tagged { $<tag>1; $3; } ;
-      |            ^~~
-input.y:7.6-8: error: unset value: $$ [-Werror=other]
-    7 | end: { } ;
-      |      ^~~
-28. input.at:1247:  ok
-./input.at:1199: sed 's,.*/$,,' stderr 1>&2
-./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
 ./input.at:1152: sed 's,.*/$,,' stderr 1>&2
-./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
-
 ./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
-29. input.at:1275: testing Incompatible Aliases ...
-./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-stderr:
-input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other]
+./input.at:1199: sed 's,.*/$,,' stderr 1>&2
+./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
 stderr:
 input.y:16.13-19: error: useless %printer for type <type1> [-Werror=other]
 input.y:17.16-22: error: useless %destructor for type <type2> [-Werror=other]
-./input.at:1236: sed 's,.*/$,,' stderr 1>&2
 ./input.at:1085: sed 's,.*/$,,' stderr 1>&2
-./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -o input.c input.y --warnings=error
+stderr:
+input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other]
 ./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
-./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+18. input.at:832:  ok
+./input.at:1236: sed 's,.*/$,,' stderr 1>&2
+
+./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
+./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -o input.c input.y --warnings=error
 ./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
+28. input.at:1247: testing Token collisions ...
+./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+28. input.at:1247:  ok
+
 ./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
+29. input.at:1275: testing Incompatible Aliases ...
+./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none
+./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
 ./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
 ./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
 ./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
 ./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
-./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none
-./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 23. input.at:1045:  ok
-29. input.at:1275:  ok
-
-./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
 
+26. input.at:1187:  ok
+./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none
+29. input.at:1275:  ok
 30. input.at:1400: testing Torturing the Scanner ...
 ./input.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret  input.y
-./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
-./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
+
+
 31. input.at:1569: testing Typed symbol aliases ...
-./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -v -o input.c input.y
-26. input.at:1187:  ok
-./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none
+./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
-
 32. input.at:1609: testing Require 1.0 ...
 ./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
+27. input.at:1219:  ok
 31. input.at:1569:  ok
+stderr:
+
+32. input.at:1609:  ok
 
-./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 33. input.at:1610: testing Require 3.8.2 ...
 ./input.at:1610: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+
+34. input.at:1612: testing Require 100.0 ...
+./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
+input.y:9.10-16: error: require bison 100.0, but have 3.8.2
+34. input.at:1612:  ok
+./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+35. input.at:1619: testing String aliases for character tokens ...
+./input.at:1632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
-./input.at:1555: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-27. input.at:1219:  ok
-32. input.at:1609:  ok
 input.y:6.23-28: error: unused value: $4 [-Werror=other]
 input.y:8.9-11: error: unset value: $$ [-Werror=other]
 ./input.at:1175: sed 's,.*/$,,' stderr 1>&2
-./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
-
+./input.at:1555: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 
 ./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
+36. input.at:1642: testing Symbols ...
+./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --yacc input.y
 stderr:
-34. input.at:1612: testing Require 100.0 ...
 33. input.at:1610:  ok
-./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
-35. input.at:1619: testing String aliases for character tokens ...
-./input.at:1632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-stderr:
-input.y:9.10-16: error: require bison 100.0, but have 3.8.2
-34. input.at:1612:  ok
-./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
-
+./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 
-./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
-./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
-36. input.at:1642: testing Symbols ...
-./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --yacc input.y
+35. input.at:1619:  ok
+./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 37. input.at:1708: testing Numbered tokens ...
+./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
 ./input.at:1720: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret redecl.y
-stderr:
-35. input.at:1619:  ok
-input.y:3.13-14: error: useless %printer for type <> [-Werror=other]
-./input.at:1116: sed 's,.*/$,,' stderr 1>&2
 
-./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
+./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
 38. input.at:1750: testing Unclosed constructs ...
 ./input.at:1779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y
 38. input.at:1750:  ok
+./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --yacc input.y -Werror
 
-./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
-./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
-./input.at:1735: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret too-large.y
+stderr:
+input.y:3.13-14: error: useless %printer for type <> [-Werror=other]
 39. input.at:1805: testing %start after first rule ...
+./input.at:1116: sed 's,.*/$,,' stderr 1>&2
+./input.at:1735: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret too-large.y
 ./input.at:1817: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --yacc input.y -Werror
+./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
+./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
 37. input.at:1708:  ok
 
-25. input.at:1139:  ok
-./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
 40. input.at:1826: testing Duplicate %start symbol ...
 ./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-
-39. input.at:1805:  ok
-./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y
 stderr:
-41. input.at:1895: testing %prec takes a token ...
-./input.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-
 input.y:1.1-5: error: POSIX Yacc does not support %code [-Werror=yacc]
 input.y:9.8-16: error: POSIX Yacc forbids dashes in symbol names: WITH-DASH [-Werror=yacc]
 input.y:10.21-34: error: POSIX Yacc does not support string literals [-Werror=yacc]
@@ -3507,17 +3535,26 @@
 input.y:20.8-16: error: POSIX Yacc forbids dashes in symbol names: with-dash [-Werror=yacc]
 input.y:22.15-28: error: POSIX Yacc does not support string literals [-Werror=yacc]
 input.y:24.17-32: error: POSIX Yacc does not support string literals [-Werror=yacc]
-41. input.at:1895:  ok
+39. input.at:1805:  ok
 ./input.at:1666: sed 's,.*/$,,' stderr 1>&2
-./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y
-42. input.at:1916: testing %prec's token must be defined ...
-./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
-./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
 ./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --yacc input.y --warnings=error
 
+41. input.at:1895: testing %prec takes a token ...
+25. input.at:1139:  ok
+./input.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+41. input.at:1895:  ok
+
+./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+
+42. input.at:1916: testing %prec's token must be defined ...
+./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y
 43. input.at:1936: testing Reject unused %code qualifiers ...
 ./input.at:1946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-c.y
-./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
+./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y
+./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none
 stderr:
 input.y:1.12-14: error: duplicate directive [-Werror=other]
     1 | %start exp exp exp
@@ -3532,48 +3569,57 @@
     1 | %start exp exp exp
       |        ^~~
 input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
 ./input.at:1836: sed 's,.*/$,,' stderr 1>&2
+./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
 ./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
-./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none
 stderr:
-input.y:2.8-17: error: token for %prec is not defined: PREC [-Werror=other]
-./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
-./input.at:1925: sed 's,.*/$,,' stderr 1>&2
+stdout:
+./input.at:1556: $CC $CFLAGS $CPPFLAGS  -c -o main.o main.c 
+./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
 ./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-c-glr.y
+./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none
+stderr:
+input.y:2.8-17: error: token for %prec is not defined: PREC [-Werror=other]
 ./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
+./input.at:1925: sed 's,.*/$,,' stderr 1>&2
+./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
+./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
+./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-c++.y
 stderr:
 stdout:
-./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
-./input.at:1556: $CC $CFLAGS $CPPFLAGS  -c -o main.o main.c 
-./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none
-./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror
+./input.at:1557: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.o main.o $LIBS
 ./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
 stderr:
 input.y:2.16-18: error: useless %printer for type <*> [-Werror=other]
 ./input.at:1124: sed 's,.*/$,,' stderr 1>&2
+./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror
+stderr:
+stdout:
+./input.at:1558:  $PREPARSER ./input
 ./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
-./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
-./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-c++.y
+stderr:
 ./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-c++-glr.y
+./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror
-stderr:
-stdout:
-./input.at:1557: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.o main.o $LIBS
-./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+30. input.at:1400:  ok
+./input.at:1681: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
+
+44. input.at:2025: testing Multiple %code ...
+./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret special-char-@@.y
 ./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
-./input.at:1681: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-c++-glr.y
 42. input.at:1916:  ok
-./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
-stderr:
-stdout:
-./input.at:1558:  $PREPARSER ./input
-stderr:
 
-./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./input.at:2054: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+45. input.at:2065: testing errors ...
+./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-redefined.y
+./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-unused.y
+./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
 stderr:
 input.y:1.16-18: error: duplicate directive [-Werror=other]
     1 | %start exp foo exp
@@ -3582,11 +3628,17 @@
     1 | %start exp foo exp
       |        ^~~
 input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-30. input.at:1400:  ok
-44. input.at:2025: testing Multiple %code ...
-./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret special-char-].y
 ./input.at:1859: sed 's,.*/$,,' stderr 1>&2
-./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret special-char-@@.y
+stderr:
+./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
+stdout:
+./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
+36. input.at:1642:  ok
+45. input.at:2065:  ok
+
+24. input.at:1074:  ok
+
 stderr:
 input.y:12.10-32: error: unset value: $$ [-Werror=other]
    12 | a: INT | INT { } INT { } INT { };
@@ -3717,23 +3769,17 @@
 input.y:26.40-42: error: unset value: $$ [-Werror=other]
    26 | o: INT | INT <integer>{ } INT <integer>{ } INT { $$ = $1 + $2 + $3 + $4 + $5; };
       |                                        ^~~
-
-./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
-./input.at:776: sed 's,.*/$,,' stderr 1>&2
-24. input.at:1074:  ok
-45. input.at:2065: testing errors ...
-./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error
-./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-redefined.y
-./input.at:2054: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-unused.y
-
 46. input.at:2102: testing %define, --define, --force-define ...
 ./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \
                  -Fvar-ff=cmd-f1 -Fvar-ff=cmd-f2 \
                  -Dvar-dfg=cmd-d -Fvar-dfg=cmd-f \
                  -Fvar-fd=cmd-f -Dvar-fd=cmd-d   \
                  --skeleton ./skel.c input.y
-stderr:
+./input.at:776: sed 's,.*/$,,' stderr 1>&2
+
+47. input.at:2170: testing "%define" Boolean variables ...
+./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret Input.y
+43. input.at:1936:  ok
 stderr:
 input.y:12.10-32: error: unset value: $$ [-Werror=other]
    12 | a: INT | INT { } INT { } INT { };
@@ -3864,197 +3910,157 @@
 input.y:26.40-42: error: unset value: $$ [-Werror=other]
    26 | o: INT | INT <integer>{ } INT <integer>{ } INT { $$ = $1 + $2 + $3 + $4 + $5; };
       |                                        ^~~
-stdout:
 ./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
-./input.at:786: sed 's,.*/$,,' stderr 1>&2
-./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
-./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret special-char-].y
-./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error
+./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error
 ./input.at:2123: cat input.tab.c
-36. input.at:1642:  ok
-./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dvar=cmd-d input-dg.y
-
-45. input.at:2065:  ok
-./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y
-47. input.at:2170: testing "%define" Boolean variables ...
-./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret Input.y
-./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y
+./input.at:786: sed 's,.*/$,,' stderr 1>&2
 
-43. input.at:1936:  ok
 48. input.at:2191: testing "%define" code variables ...
 ./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.yy
-./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
-
-47. input.at:2170:  ok
+./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error
+./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dvar=cmd-d input-dg.y
 49. input.at:2224: testing "%define" keyword variables ...
 ./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y
+47. input.at:2170:  ok
+./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y
 
+./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 50. input.at:2257: testing "%define" enum variables ...
 ./input.at:2269: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-46. input.at:2102:  ok
-./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-
-./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none
 ./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy -Werror
+./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
 ./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+46. input.at:2102:  ok
+
 ./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
 51. input.at:2320: testing "%define" file variables ...
 ./input.at:2329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
-./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none
 stderr:
-input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated]
-input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated]
-input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated]
-input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated]
-./input.at:2213: sed 's,.*/$,,' stderr 1>&2
-51. input.at:2320:  ok
+stdout:
 stderr:
-50. input.at:2257:  ok
+./input.at:2055:  $PREPARSER ./input
 input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated]
 input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated]
 input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated]
 input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated]
 input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated]
-./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+stderr:
+stderr:
+./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated]
+input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated]
+input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated]
+input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated]
+./input.at:2246: sed 's,.*/$,,' stderr 1>&2
+44. input.at:2025:  ok
+./input.at:2213: sed 's,.*/$,,' stderr 1>&2
+./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
+50. input.at:2257:  ok
 ./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy --warnings=error
 
-./input.at:2246: sed 's,.*/$,,' stderr 1>&2
 
-./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
 52. input.at:2342: testing "%define" backward compatibility ...
 ./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-53. input.at:2393: testing Unused api.pure ...
+./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+51. input.at:2320: 53. input.at:2393: testing Unused api.pure ...
+ ok
 ./input.at:2413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 52. input.at:2342:  ok
+./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none
 
+
+./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none
 54. input.at:2429: testing C++ namespace reference errors ...
 ./input.at:2450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-stderr:
-stdout:
-./input.at:2055:  $PREPARSER ./input
-./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none
-stderr:
-stderr:
-input.y:2.8-10: error: duplicate directive [-Werror=other]
-    2 | %start exp
-      |        ^~~
-input.y:1.8-10: note: previous declaration
-    1 | %start exp foo
-      |        ^~~
-input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-44. input.at:2025:  ok
-./input.at:1877: sed 's,.*/$,,' stderr 1>&2
-./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none
-./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
-./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-
-./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
 55. input.at:2482: testing Bad character literals ...
 ./input.at:2484:
 set x `LC_ALL=C ls -l 'empty.y'` &&
   size=$6 &&
   { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='empty.y'; } || exit 77
-./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none
-./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none
+./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
 stderr:
 0+0 records in
 0+0 records out
-0 bytes copied, 9.3961e-05 s, 0.0 kB/s
+0 bytes copied, 6.42e-05 s, 0.0 kB/s
 stdout:
 ./input.at:2490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret empty.y
+./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none
 ./input.at:2508:
 set x `LC_ALL=C ls -l 'two.y'` &&
   size=$6 &&
   { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='two.y'; } || exit 77
-./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
-./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
 stderr:
-./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+input.y:2.8-10: error: duplicate directive [-Werror=other]
+    2 | %start exp
+      |        ^~~
+input.y:1.8-10: note: previous declaration
+    1 | %start exp foo
+      |        ^~~
+input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
+./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+stderr:
 0+0 records in
 0+0 records out
-0 bytes copied, 8.6761e-05 s, 0.0 kB/s
+0 bytes copied, 6.686e-05 s, 0.0 kB/s
 stdout:
-./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:1877: sed 's,.*/$,,' stderr 1>&2
 ./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret two.y
+./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
 ./input.at:2522:
 set x `LC_ALL=C ls -l 'three.y'` &&
   size=$6 &&
   { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='three.y'; } || exit 77
+./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 stderr:
 0+0 records in
 0+0 records out
-0 bytes copied, 8.236e-05 s, 0.0 kB/s
+0 bytes copied, 5.852e-05 s, 0.0 kB/s
 stdout:
-./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.yy
 ./input.at:2528: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret three.y
-./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
 55. input.at:2482:  ok
+./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 
-./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none
+./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
 56. input.at:2543: testing Bad escapes in literals ...
 ./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77
-./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 ./input.at:2558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy -Werror
+./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 56. input.at:2543:  ok
+./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
 
-./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
-15. input.at:774:  ok
 57. input.at:2582: testing Unexpected end of file ...
 ./input.at:2586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
 ./input.at:2591:
 set x `LC_ALL=C ls -l 'char.y'` &&
   size=$6 &&
   { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='char.y'; } || exit 77
-40. input.at:1826:  ok
-16. input.at:784:  ok
-
-
+./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none
+./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.yy
 stderr:
 0+0 records in
 0+0 records out
-0 bytes copied, 8.4421e-05 s, 0.0 kB/s
+0 bytes copied, 6.324e-05 s, 0.0 kB/s
 stdout:
+./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 ./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret char.y
-./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-
-stderr:
-input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated]
-input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated]
-input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated]
-input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated]
-58. input.at:2675: testing LAC: Errors for %define ...
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y
-60. input.at:2764: testing %name-prefix and api.prefix are incompatible ...
 ./input.at:2604:
 set x `LC_ALL=C ls -l 'escape-in-char.y'` &&
   size=$6 &&
   { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-char.y'; } || exit 77
-59. input.at:2719: testing -Werror combinations ...
-./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall input.y
-./input.at:2214: sed 's,.*/$,,' stderr 1>&2
-./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret  -Wno-deprecated input.y
-stderr:
-./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy --warnings=error
+./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none
 stderr:
 0+0 records in
 0+0 records out
-0 bytes copied, 9.2001e-05 s, 0.0 kB/s
+0 bytes copied, 6.084e-05 s, 0.0 kB/s
 stdout:
+./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
 ./input.at:2607: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret escape-in-char.y
-input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated]
-input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated]
-input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated]
-input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated]
-input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated]
-./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:2247: sed 's,.*/$,,' stderr 1>&2
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y
-./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 ./input.at:2617:
 set x `LC_ALL=C ls -l 'string.y'` &&
   size=$6 &&
@@ -4062,104 +4068,117 @@
 stderr:
 0+0 records in
 0+0 records out
-0 bytes copied, 9.156e-05 s, 0.0 kB/s
+0 bytes copied, 6.106e-05 s, 0.0 kB/s
 stdout:
-./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret string.y
-./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
-./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y -Werror
+40. input.at:1826: ./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret string.y
+ ok
+./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 ./input.at:2630:
 set x `LC_ALL=C ls -l 'escape-in-string.y'` &&
   size=$6 &&
   { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-string.y'; } || exit 77
-./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none
+
+./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy -Werror
 stderr:
-./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 0+0 records in
 0+0 records out
-0 bytes copied, 9.0061e-05 s, 0.0 kB/s
+0 bytes copied, 6.388e-05 s, 0.0 kB/s
 stdout:
-./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y
 ./input.at:2633: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret escape-in-string.y
-./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y
+58. input.at:2675: testing LAC: Errors for %define ...
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y
+stderr:
+input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated]
+input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated]
+input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated]
+input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated]
+input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated]
 ./input.at:2643:
 set x `LC_ALL=C ls -l 'tstring.y'` &&
   size=$6 &&
   { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='tstring.y'; } || exit 77
+./input.at:2247: sed 's,.*/$,,' stderr 1>&2
 stderr:
 0+0 records in
 0+0 records out
-0 bytes copied, 8.308e-05 s, 0.0 kB/s
+0 bytes copied, 5.816e-05 s, 0.0 kB/s
 stdout:
-stderr:
-input.y:2.15: error: stray '$' [-Werror=other]
 ./input.at:2646: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret tstring.y
-./input.at:2727: sed 's,.*/$,,' stderr 1>&2
+./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
 ./input.at:2656:
 set x `LC_ALL=C ls -l 'escape-in-tstring.y'` &&
   size=$6 &&
   { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-tstring.y'; } || exit 77
-./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
-./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y
-./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y --warnings=error
+./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 stderr:
 0+0 records in
 0+0 records out
-0 bytes copied, 7.5421e-05 s, 0.0 kB/s
+0 bytes copied, 6.882e-05 s, 0.0 kB/s
 stdout:
 ./input.at:2659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret escape-in-tstring.y
-./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none
-53. input.at:2393: 57. input.at:2582:  ok
- ok
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y
+stderr:
+input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated]
+input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated]
+input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated]
+input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated]
+57. input.at:2582:  ok
+./input.at:2214: sed 's,.*/$,,' stderr 1>&2
+./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy --warnings=error
+
+./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+53. input.at:2393:  ok
+59. input.at:2719: testing -Werror combinations ...
+./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall input.y
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y
 
+./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+15. input.at:774:  ok
+60. input.at:2764: testing %name-prefix and api.prefix are incompatible ...
+./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret  -Wno-deprecated input.y
 
-./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none
-./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
-62. input.at:2840: testing Stray $ or @ ...
+16. input.at:784:  ok
 61. input.at:2793: testing Redefined %union name ...
 ./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y
+./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none
+./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y -Werror
+
+./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y
+62. input.at:2840: testing Stray $ or @ ...
 ./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall input.y
-./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none
-./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -p bar -Wno-deprecated input.y
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y
-./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-48. input.at:2191:  ok
+./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y
 ./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
-60. input.at:2764:  ok
-./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -W input.y
-
+stderr:
+input.y:2.15: error: stray '$' [-Werror=other]
+./input.at:2727: sed 's,.*/$,,' stderr 1>&2
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y
+./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none
+./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y --warnings=error
 49. input.at:2224:  ok
+./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 ./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y -Werror
+./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y
 
-
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y
-63. input.at:2883: testing Code injection ...
-64. input.at:2946: testing Deprecated directives ...
-65. input.at:3077: testing Unput's effect on locations ...
-./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y
-./input.at:3019: cp errors-all experr
-./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -ffixit input.y
 stderr:
 input.y:3.8-10: error: %define variable 'api.value.union.name' redefined [-Werror=other]
 input.y:1.8-10: note: previous definition
 input.y:4.1-32: error: %define variable 'api.value.union.name' redefined [-Werror=other]
 input.y:3.8-10: note: previous definition
 input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-54. input.at:2429:  ok
-65. input.at:3077:  ok
-
 ./input.at:2808: sed 's,.*/$,,' stderr 1>&2
-./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -W input.y -Werror
-
 ./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y
+63. input.at:2883: testing Code injection ...
+./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y
+./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none
+./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -p bar -Wno-deprecated input.y
+48. input.at:2191:  ok
 stderr:
-66. input.at:3113: testing Non-deprecated directives ...
-./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
 input.y:11.19: error: stray '$' [-Werror=other]
 input.y:11.23: error: stray '@' [-Werror=other]
 input.y:12.19: error: stray '$' [-Werror=other]
@@ -4169,308 +4188,306 @@
 input.y:16.19: error: stray '$' [-Werror=other]
 input.y:16.23: error: stray '@' [-Werror=other]
 input.y:17.19: error: stray '$' [-Werror=other]
-67. input.at:3148: testing Cannot type action ...
-./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y
 ./input.at:2861: sed 's,.*/$,,' stderr 1>&2
-./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr.c -d input.y
+
+./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
 ./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y --warnings=error
+./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr.c -d input.y
+./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none
+64. input.at:2946: testing Deprecated directives ...
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y
+./input.at:3019: cp errors-all experr
+./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -ffixit input.y
+60. input.at:2764:  ok
+54. input.at:2429:  ok
+./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
+
+
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y
+./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -W input.y
+65. input.at:3077: testing Unput's effect on locations ...
+./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+66. input.at:3113: testing Non-deprecated directives ...
+./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y
+./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none
+65. input.at:3077:  ok
 ./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr
-stderr:
-input.y:2.15: error: stray '$' [-Werror=other]
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y
 ./input.at:3023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
-./input.at:2730: sed 's,.*/$,,' stderr 1>&2
-./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+
+./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+67. input.at:3148: testing Cannot type action ...
+./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y
 ./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
-./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -W input.y --warnings=error
-./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
-./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y
+./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y
+./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -W input.y -Werror
+./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none
 ./input.at:3027: rm -f output.c
-stderr:
-input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other]
-   10 | exp: <int> {}
-      |      ^~~~~~~~
 ./input.at:3028: cp input.y input.y.orig
 ./input.at:3029: sed -e '/fix-it/d' <errors-all >experr
+./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
 ./input.at:3030: echo "bison: file 'input.y' was updated (backup: 'input.y~')" >>experr
-./input.at:3156: sed 's,.*/$,,' stderr 1>&2
 ./input.at:3031: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --update input.y
-./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none
-./input.at:3034: diff input.y.orig input.y~
 ./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y
+./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:3034: diff input.y.orig input.y~
 ./input.at:3037: test ! -f output.c
-./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
+./input.at:3040: sed -e '1,8d' input.y
 stderr:
 input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other]
 input.y:13.1-18: note: previous declaration
 input.y: error: %expect-rr applies only to GLR parsers [-Werror=other]
 input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./input.at:3040: sed -e '1,8d' input.y
-./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none
+stderr:
 ./input.at:3062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
+input.y:2.15: error: stray '$' [-Werror=other]
 ./input.at:3133: sed 's,.*/$,,' stderr 1>&2
-./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
+./input.at:2730: sed 's,.*/$,,' stderr 1>&2
 ./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
+./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -W input.y --warnings=error
+stderr:
+input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other]
+   10 | exp: <int> {}
+      |      ^~~~~~~~
+62. input.at:2840:  ok
 ./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y
-./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
-./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none
-./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y
-./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none
-./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y
-64. input.at:2946:  ok
-./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y
+./input.at:3156: sed 's,.*/$,,' stderr 1>&2
 
-./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
+61. input.at:2793:  ok
 68. input.at:3171: testing Character literals and api.token.raw ...
 ./input.at:3181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y
-68. input.at:3171:  ok
-./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-none input.y
-./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y
-62. input.at:2840:  ok
+64. input.at:2946:  ok
 
-./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-67. input.at:3148:  ok
-./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
+68. input.at:3171:  ok
+./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y
+./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none
 
-./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y
 69. input.at:3205: testing %token-table and parse.error ...
+
 ./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 70. input.at:3231: testing Invalid file prefix mapping arguments ...
 ./input.at:3246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -M foo input.y
-
-./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --file-prefix-map foo input.y
 71. named-refs.at:22: testing Tutorial calculator ...
+./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
 ./named-refs.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-none input.y -Werror
 ./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.d  input.y
+./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --file-prefix-map foo input.y
 ./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -M foo=bar -M baz input.y
-66. input.at:3113:  ok
-61. input.at:2793:  ok
+./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
 ./input.at:3249: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -M foo= -M baz input.y
-./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y
-
+./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none
+./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y
 70. input.at:3231:  ok
-
 ./input.at:3221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+
+./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.java  input.y
 72. named-refs.at:196: testing Undefined and ambiguous references ...
+./named-refs.at:254: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o test.c test.y
+72. named-refs.at:196:  ok
+66. input.at:3113:  ok
+./named-refs.at:184: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-none input.y
+./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y
+
+
+67. input.at:3148: 69. input.at:3205:  ok
+ ok
+./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y
 73. named-refs.at:297: testing Misleading references ...
 ./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o test.c test.y
-./named-refs.at:254: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o test.c test.y
 
-./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.java  input.y
-stderr:
-input.y:2.15: error: stray '$' [-Werror=other]
-72. named-refs.at:196:  ok
-./input.at:2733: sed 's,.*/$,,' stderr 1>&2
 74. named-refs.at:316: testing Many kinds of errors ...
 ./named-refs.at:384: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y
-./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-none input.y --warnings=error
-69. input.at:3205:  ok
-./named-refs.at:426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o test.c test.y
-
-./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y -Werror
-./named-refs.at:184: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 
-./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y
-./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y
+./named-refs.at:426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o test.c test.y
 75. named-refs.at:551: testing Missing identifiers in brackets ...
 ./named-refs.at:559: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y
-74. named-refs.at:316:  ok
 76. named-refs.at:567: testing Redundant words in brackets ...
-75. named-refs.at:551:  ok
 ./named-refs.at:575: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y
-./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none
+75. named-refs.at:551:  ok
+74. named-refs.at:316:  ok
 76. named-refs.at:567:  ok
+./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-none input.y -Werror
+./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y
+
 
-stderr:
-test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other]
-test.y:11.8-10: note: refers to: $foo at $1
-test.y:11.12-18: note: possibly meant: $[foo.bar] at $2
 
-./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2
 ./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr.c -d input.y
 78. named-refs.at:599: testing Stray symbols in brackets ...
 77. named-refs.at:583: testing Comments in brackets ...
 ./named-refs.at:591: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y
-
-./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y --warnings=error
 ./named-refs.at:607: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' test.y || exit 77
-77. named-refs.at:583:  ok
 79. named-refs.at:618: testing Redundant words in LHS brackets ...
-./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y
-./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none
 ./named-refs.at:625: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y
-58. input.at:2675:  ok
-78. named-refs.at:599:  ok
+./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y -Werror
+./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y
+77. named-refs.at:583:  ok
 79. named-refs.at:618:  ok
+78. named-refs.at:599:  ok
 
 
 
-
-81. named-refs.at:648: testing Unresolved references ...
-./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y
+stderr:
+input.y:2.15: error: stray '$' [-Werror=other]
 80. named-refs.at:635: testing Factored LHS ...
-83. output.at:68: testing Output files:  -dv ...
+./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o test.c test.y
+81. named-refs.at:648: testing Unresolved references ...
+./input.at:2733: sed 's,.*/$,,' stderr 1>&2
 ./named-refs.at:676: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y
+58. input.at:2675:  ok
 82. named-refs.at:715: testing $ or @ followed by . or - ...
-./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o test.c test.y
 ./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y
-./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none
-./output.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv foo.y
-./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror,none,other input.y
+./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-none input.y --warnings=error
 81. named-refs.at:648:  ok
 ./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret test.y
 
 stderr:
+./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y
+test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other]
+test.y:11.8-10: note: refers to: $foo at $1
+test.y:11.12-18: note: possibly meant: $[foo.bar] at $2
+
+./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2
+83. output.at:68: testing Output files:  -dv ...
+./output.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv foo.y
 84. output.at:74: testing Output files:  -dv >&- ...
 ./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac
-./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror,no-all,other input.y
+./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y --warnings=error
+./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv >&- foo.y
+80. named-refs.at:635:  ok
+./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none
+
+./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y -Werror
+./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y
+85. output.at:81: testing Output files:  -dv -o foo.c ...
+./output.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -o foo.c foo.y
+stderr:
 ./output.at:68: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv >&- foo.y
-./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y
-./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y -Werror
-./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none
-80. named-refs.at:635:  ok
-83. output.at:68:  ok
-
-
 stderr:
-85. output.at:81: testing Output files:  -dv -o foo.c ...
 ./output.at:74: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -o foo.c foo.y
-./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror -Wno-error=other input.y
+83. output.at:68:  ok
+./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none
 84. output.at:74:  ok
-73. named-refs.at:297:  ok
-86. output.at:84: testing Output files:  -dv -y ...
-./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -y foo.y
+
+./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none
+
 stderr:
+86. output.at:84: testing Output files:  -dv -y ...
 test.y:4.9: error: stray '$' [-Werror=other]
 test.y:5.9: error: stray '@' [-Werror=other]
-
-
-./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y
-./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2
-88. output.at:92: testing Output files:  -dv -o foo.tab.c ...
-./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -o foo.tab.c foo.y
-./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y --warnings=error
-87. output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ...
-./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -y foo.y
+./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -y foo.y
 stderr:
 ./output.at:81: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
+87. output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ...
+./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2
+./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -y foo.y
 ./output.at:81: grep '#include "foo.h"' foo.c
-./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-error=other -Werror input.y
+./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y --warnings=error
 stdout:
-./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.d  input.y
 #include "foo.h"
 85. output.at:81:  ok
+./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none
+./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y
+
+./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror,none,other input.y
+88. output.at:92: testing Output files:  -dv -o foo.tab.c ...
+./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -o foo.tab.c foo.y
+stderr:
 stderr:
 ./output.at:84: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-
-stderr:
 foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc]
 ./output.at:87: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
+./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none
 ./output.at:84: grep '#include "y.tab.h"' y.tab.c
-stderr:
-./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.java  input.y
 stdout:
+./output.at:87: grep '#include "./foo.h"' y.tab.c
+86. output.at:84:  ok
+stdout:
+#include "./foo.h"
+87. output.at:87:  ok
+73. named-refs.at:297:  ok
+
+./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror,no-all,other input.y
+
+
+./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.d  input.y
+stderr:
 ./output.at:92: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none
-./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror=other -Wno-other input.y
 89. output.at:95: testing Output files:  --fixed-output-files -dv -g --html ...
-86. output.at:84:  ok
 89. output.at:95:  skipped (output.at:95)
-./output.at:87: grep '#include "./foo.h"' y.tab.c
-stdout:
-#include "./foo.h"
+90. output.at:97: testing Output files:  -Hfoo.header -v -gfoo.gv --html=foo.html ...
+91. output.at:100: testing Output files:  -dv -g --xml --fixed-output-files ...
+90. output.at:97:  skipped (output.at:97)
+./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y
 88. output.at:92:  ok
-87. output.at:87:  ok
 
 
-
-90. output.at:97: testing Output files:  -Hfoo.header -v -gfoo.gv --html=foo.html ...
-90. output.at:97: 91. output.at:100: testing Output files:  -dv -g --xml --fixed-output-files ...
- skipped (output.at:97)
+./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none
 
 92. output.at:102: testing Output files:  -dv -g --xml -y ...
-63. input.at:2883:  ok
-59. input.at:2719:  ok
-./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y
 ./output.at:102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -g --xml -y foo.y
-
 93. output.at:104: testing Output files: %require "3.4" -dv -g --xml -y ...
 ./output.at:104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -g --xml -y foo.y
-
-
-./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none
 94. output.at:107: testing Output files:  -dv -g --xml -o y.tab.c ...
+./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -S lalr1.java  input.y
+./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror -Wno-error=other input.y
 ./output.at:107: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -g --xml -o y.tab.c foo.y
-96. output.at:112: testing Output files:  -dv -g -o foo.c ...
-./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -g -o foo.c foo.y
-95. output.at:110: testing Output files:  -dv -b bar ...
-./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -b bar foo.y
 stderr:
-./output.at:102: find . -type f |
-           "$PERL" -ne '
-      s,\./,,; chomp;
-      push @file, $_ unless m{^(foo.y|testsuite.log)$};
-      END { print join (" ", sort @file), "\n" }' || exit 77
-stderr:
-82. named-refs.at:715:  ok
 <command line>:6: warning: deprecated option: '--fixed-output-files', use '-o y.tab.c' [-Wdeprecated]
 ./output.at:100: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-92. output.at:102:  ok
-stderr:
+82. named-refs.at:715:  ok
+91. output.at:100:  ok
+
+63. input.at:2883:  ok
 stderr:
-./output.at:107: find . -type f |
+
+./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-error=other -Werror input.y
+./output.at:102: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:112: find . -type f |
+stderr:
+./output.at:107: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-91. output.at:100:  ok
-
 stderr:
 stderr:
-./output.at:110: find . -type f |
-           "$PERL" -ne '
-      s,\./,,; chomp;
-      push @file, $_ unless m{^(foo.y|testsuite.log)$};
-      END { print join (" ", sort @file), "\n" }' || exit 77
-94. output.at:107:  ok
 foo.y:1.1-8: warning: POSIX Yacc does not support %require [-Wyacc]
 foo.y:1.10-14: warning: POSIX Yacc does not support string literals [-Wyacc]
 ./output.at:104: find . -type f |
@@ -4478,448 +4495,468 @@
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-96. output.at:112:  ok
-
-97. output.at:116: testing Output files: %header %verbose  ...
-./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.y
-95. output.at:110:  ok
+95. output.at:110: testing Output files:  -dv -b bar ...
+stdout:
+./named-refs.at:185:  $PREPARSER ./test input.txt
+92. output.at:102:  ok
 
+stderr:
+96. output.at:112: testing Output files:  -dv -g -o foo.c ...
+94. output.at:107:  ok
+./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -b bar foo.y
+./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 93. output.at:104:  ok
+./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -dv -g -o foo.c foo.y
+stderr:
+71. named-refs.at:22:  ok
 
-98. output.at:118: testing Output files: %header %verbose %yacc  ...
-./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.y
 
+97. output.at:116: testing Output files: %header %verbose  ...
+
+./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.y
 
+98. output.at:118: testing Output files: %header %verbose %yacc  ...
 99. output.at:121: testing Output files: %header %verbose %yacc  ...
-100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose  ...
-
+./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.y
 ./output.at:121: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
+100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose  ...
 ./output.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.y
-102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc  ...
-./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.y
 101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc  ...
-103. output.at:136: testing Output files: %header %verbose  ...
+./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror=other -Wno-other input.y
 ./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.y
-./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
 stderr:
-./output.at:116: find . -type f |
+./output.at:110: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
 stderr:
-97. output.at:116:  ok
-./output.at:118: find . -type f |
+./output.at:112: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
+95. output.at:110:  ok
 stderr:
-stderr:
-./output.at:125: find . -type f |
+96. output.at:112:  ok
+./output.at:116: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:127: find . -type f |
+
+97. output.at:116:  ok
+
+stderr:
+./output.at:125: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-98. output.at:118:  ok
+59. input.at:2719:  ok
 
+102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc  ...
 stderr:
-100. output.at:125:  ok
-./output.at:129: find . -type f |
+stderr:
+stderr:
+./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.y
+./output.at:118: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-101. output.at:127:  ok
-stderr:
-stderr:
-stdout:
-stderr:
-
-104. output.at:139: testing Output files: %header %verbose  -o foo.c ...
 ./output.at:121: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:136: find . -type f |
+./output.at:127: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
-      push @file, $_ unless m{^(foo.yy|testsuite.log)$};
+      push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./named-refs.at:185:  $PREPARSER ./test input.txt
+103. output.at:136: testing Output files: %header %verbose  ...
+100. output.at:125:  ok
+./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
+
+98. output.at:118: 104. output.at:139: testing Output files: %header %verbose  -o foo.c ...
+ ok
+101. output.at:127:  ok
 99. output.at:121:  ok
 ./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c foo.yy
-102. output.at:129:  ok
-103. output.at:136: stderr:
- ok
 
-./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 
-stderr:
+
+
 105. output.at:142: testing Output files:  --header=foo.hpp -o foo.c++ ...
-71. named-refs.at:22:  ok
 ./output.at:142: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy
 106. output.at:146: testing Output files:  --header=foo.hpp -o foo.c++ ...
-
-./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy
-
 107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ...
-
+./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy
+108. output.at:154: testing Output files:  -o foo.c++ --graph=foo.gph ...
+109. output.at:160: testing Output files: %type <foo> useless --header --graph --xml --report=all -Wall -Werror ...
+./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c++ foo.yy
+./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y
+./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy
 stderr:
-./output.at:139: find . -type f |
+./output.at:129: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
-      push @file, $_ unless m{^(foo.yy|testsuite.log)$};
+      push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c++ foo.yy
-
-110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ...
-104. output.at:139:  ok
-./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y
-108. output.at:154: testing Output files:  -o foo.c++ --graph=foo.gph ...
-111. output.at:173: testing Output files: %defines -o foo.c++ ...
-./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy
-
-109. output.at:160: testing Output files: %type <foo> useless --header --graph --xml --report=all -Wall -Werror ...
-./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y
-./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c++ foo.yy
 stderr:
-112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ...
-./output.at:150: find . -type f |
+./output.at:136: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c++ foo.yy
-107. output.at:150:  ok
+102. output.at:129:  ok
 stderr:
+./output.at:139: find . -type f |
+           "$PERL" -ne '
+      s,\./,,; chomp;
+      push @file, $_ unless m{^(foo.yy|testsuite.log)$};
+      END { print join (" ", sort @file), "\n" }' || exit 77
+103. output.at:136:  ok
 stderr:
+104. output.at:139:  ok
+
 ./output.at:142: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:146: find . -type f |
+
+stderr:
+stderr:
+./output.at:150: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-stderr:
-./output.at:173: find . -type f |
+./output.at:146: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-stderr:
 
+105. output.at:142:  ok
 stderr:
 foo.y:1.13-19: error: symbol 'useless' is used, but is not defined as a token and has no rules [-Werror=other]
 foo.y: error: 1 nonterminal useless in grammar [-Werror=other]
 foo.y:1.13-19: error: nonterminal useless in grammar: useless [-Werror=other]
-./output.at:154: find . -type f |
-           "$PERL" -ne '
-      s,\./,,; chomp;
-      push @file, $_ unless m{^(foo.yy|testsuite.log)$};
-      END { print join (" ", sort @file), "\n" }' || exit 77
 ./output.at:160: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-105. output.at:142:  ok
-111. output.at:173:  ok
-106. output.at:146:  ok
-109. output.at:160:  ok
+110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ...
 stderr:
-108. output.at:154:  ok
-113. output.at:191: testing Output files: lalr1.cc  ...
-./output.at:176: find . -type f |
+./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y
+./output.at:154: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
+106. output.at:146:  ok
+107. output.at:150:  ok
+111. output.at:173: testing Output files: %defines -o foo.c++ ...
+./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c++ foo.yy
 
-stderr:
+112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ...
+109. output.at:160:  ok
+./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o foo.c++ foo.yy
+108. output.at:154:  ok
 
-foo.y:1.1-15: error: %define variable 'useless' is not used
-./output.at:167: find . -type f |
-           "$PERL" -ne '
-      s,\./,,; chomp;
-      push @file, $_ unless m{^(foo.y|testsuite.log)$};
-      END { print join (" ", sort @file), "\n" }' || exit 77
 
-112. output.at:176:  ok
-114. output.at:194: testing Output files: lalr1.cc %verbose  ...
 
+113. output.at:191: testing Output files: lalr1.cc  ...
 
-110. output.at:167:  ok
-116. output.at:200: testing Output files: lalr1.cc %verbose %locations  ...
-./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
-./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
+./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
 115. output.at:197: testing Output files: lalr1.cc %header %verbose  ...
-
+114. output.at:194: testing Output files: lalr1.cc %verbose  ...
 ./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
-118. output.at:206: testing Output files: lalr1.cc %header %verbose  ...
+./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
+116. output.at:200: testing Output files: lalr1.cc %verbose %locations  ...
+./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
 117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations  ...
-
 ./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
-119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ...
-./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  subdir/foo.yy
 stderr:
-./output.at:194: find . -type f |
+foo.y:1.1-15: error: %define variable 'useless' is not used
+./output.at:167: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
-      push @file, $_ unless m{^(foo.yy|testsuite.log)$};
+      push @file, $_ unless m{^(foo.y|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo"  ...
 stderr:
-./output.at:200: find . -type f |
+stderr:
+./output.at:173: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-114. output.at:194:  ok
-116. output.at:200:  ok
-stderr:
-./output.at:197: find . -type f |
+./output.at:176: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
+110. output.at:167:  ok
+111. output.at:173:  ok
+112. output.at:176:  ok
 stderr:
 ./output.at:191: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
+stderr:
+stderr:
+./output.at:194: find . -type f |
+           "$PERL" -ne '
+      s,\./,,; chomp;
+      push @file, $_ unless m{^(foo.yy|testsuite.log)$};
+      END { print join (" ", sort @file), "\n" }' || exit 77
+
+./output.at:197: find . -type f |
+           "$PERL" -ne '
+      s,\./,,; chomp;
+      push @file, $_ unless m{^(foo.yy|testsuite.log)$};
+      END { print join (" ", sort @file), "\n" }' || exit 77
 
 
-115. output.at:197:  ok
+113. output.at:191:  ok
 stderr:
-./output.at:203: find . -type f |
+./output.at:200: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-113. output.at:191:  ok
-122. output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2"  ...
-./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
-117. output.at:203:  ok
+114. output.at:194:  ok
+115. output.at:197:  ok
 stderr:
-./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  gram_dir/foo.yy
-121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo"  ...
-./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy
-./output.at:206: find . -type f |
+./output.at:203: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
-      push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$};
+      push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
+118. output.at:206: testing Output files: lalr1.cc %header %verbose  ...
+120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo"  ...
+116. output.at:200:  ok
+119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ...
 
 
 
-./output.at:206: grep 'include .subdir/' foo.tab.cc
-./output.at:206: grep 'include .subdir/' foo.tab.hh
-124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2"  ...
+117. output.at:203:  ok
+./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  subdir/foo.yy
+
+121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo"  ...
+122. output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2"  ...
+
 123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2"  ...
-./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77
-118. output.at:206:  ok
 ./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
+./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
+124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2"  ...
+./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77
 125. output.at:267: testing Conflicting output files:  --graph="foo.tab.c" ...
-./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --graph="foo.tab.c" foo.y
 ./output.at:237: rm -f foo.yy.bak
+./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  gram_dir/foo.yy
+./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --graph="foo.tab.c" foo.y
 ./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  foo.yy
+./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy
 stderr:
-stderr:
-
-./output.at:215: find . -type f |
-           "$PERL" -ne '
-      s,\./,,; chomp;
-      push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$};
-      END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:210: find . -type f |
+./output.at:206: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
 stderr:
-120. output.at:215:  ok
-./output.at:210: grep 'include .subdir/' subdir/foo.cc
+./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  gram_dir/foo.yy
 ./output.at:226: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
 stderr:
-126. output.at:272: testing Conflicting output files: %header "foo.output" -v ...
-./output.at:210: grep 'include .subdir/' subdir/foo.hh
-./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  gram_dir/foo.yy
-./output.at:237: find . -type f |
+./output.at:206: grep 'include .subdir/' foo.tab.cc
+./output.at:206: grep 'include .subdir/' foo.tab.hh
+./output.at:231: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror
-119. output.at:210:  ok
-./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v foo.y
-
+118. output.at:206:  ok
+stderr:
 122. output.at:226:  ok
+./output.at:215: find . -type f |
+           "$PERL" -ne '
+      s,\./,,; chomp;
+      push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$};
+      END { print join (" ", sort @file), "\n" }' || exit 77
+123. output.at:231:  ok
+./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror
 stderr:
-./output.at:231: find . -type f |
+./output.at:237: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
+120. output.at:215:  ok
+
+
+
+stderr:
 124. output.at:237:  ok
+./output.at:210: find . -type f |
+           "$PERL" -ne '
+      s,\./,,; chomp;
+      push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$};
+      END { print join (" ", sort @file), "\n" }' || exit 77
 
-123. output.at:231:  ok
 127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ...
+./output.at:210: grep 'include .subdir/' subdir/foo.cc
+126. output.at:272: testing Conflicting output files: %header "foo.output" -v ...
 
 ./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --graph="location.hh" foo.y
-
+./output.at:210: grep 'include .subdir/' subdir/foo.hh
+./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v foo.y
 128. output.at:282: testing Conflicting output files:  -o foo.y ...
 ./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o foo.y foo.y
-./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v foo.y -Werror
-
+119. output.at:210:  ok
 129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ...
 ./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77
 stderr:
-foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other]
-stderr:
-130. output.at:335: testing Output file name: ( ...
-./output.at:335: touch "(.tmp" || exit 77
+./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y
 ./output.at:220: find . -type f |
            "$PERL" -ne '
       s,\./,,; chomp;
       push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$};
       END { print join (" ", sort @file), "\n" }' || exit 77
-./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y
+130. output.at:335: testing Output file name: ( ...
+./output.at:335: touch "(.tmp" || exit 77
+stderr:
+
+foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other]
 ./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y
+121. output.at:220:  ok
 ./output.at:267: sed 's,.*/$,,' stderr 1>&2
+./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error
 131. output.at:336: testing Output file name: ) ...
 ./output.at:336: touch ").tmp" || exit 77
-121. output.at:220:  ok
-./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o ").c" --header=").h" glr.y
-./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error
-stderr:
-foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other]
 
-./output.at:272: sed 's,.*/$,,' stderr 1>&2
+./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o ").c" --header=").h" glr.y
+./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v foo.y -Werror
 ./output.at:282: cat foo.y
 ./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="location.hh" foo.y -Werror
-./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v foo.y --warnings=error
-128. output.at:282:  ok
-./output.at:335: ls "(.c" "(.h"
-stdout:
-(.c
-(.h
-./output.at:335: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "(.c" 
 132. output.at:337: testing Output file name: # ...
 ./output.at:337: touch "#.tmp" || exit 77
 ./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h"
 ./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y
+128. output.at:282:  ok
 stdout:
 `~!@#$%^&*()-=_+{}[]|\:;<>, .'.c
 `~!@#$%^&*()-=_+{}[]|\:;<>, .'.h
-./output.at:336: ls ").c" ").h"
 ./output.at:328: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" 
-
+./output.at:335: ls "(.c" "(.h"
 stdout:
-stderr:
-).c
-).h
-foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other]
-./output.at:336: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c ").c" 
+
+(.c
+(.h
+./output.at:335: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "(.c" 
 ./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none
-./output.at:277: sed 's,.*/$,,' stderr 1>&2
 133. output.at:338: testing Output file name: @@ ...
 ./output.at:338: touch "@@.tmp" || exit 77
-./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none
+./output.at:336: ls ").c" ").h"
+stdout:
 ./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "@@.c" --header="@@.h" glr.y
-./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error
+).c
+).h
+./output.at:336: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c ").c" 
+stderr:
+foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other]
+stderr:
+foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other]
+./output.at:272: sed 's,.*/$,,' stderr 1>&2
 ./output.at:337: ls "#.c" "#.h"
 stdout:
+./output.at:277: sed 's,.*/$,,' stderr 1>&2
 #.c
 #.h
+./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v foo.y --warnings=error
 ./output.at:337: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "#.c" 
-./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none
+./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error
 ./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none
-./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none
 ./output.at:338: ls "@@.c" "@@.h"
 stdout:
 @@.c
 @@.h
 ./output.at:338: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "@@.c" 
-./output.at:272: cat foo.y
+./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none
+./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none
 ./output.at:267: cat foo.y
-126. output.at:272:  ok
 125. output.at:267:  ok
-./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none
 
 134. output.at:339: testing Output file name: @{ ...
 ./output.at:339: touch "@{.tmp" || exit 77
-
+./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none
 ./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y
-135. output.at:340: testing Output file name: @} ...
-./output.at:340: touch "@}.tmp" || exit 77
-./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y
+./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none
+./output.at:272: cat foo.y
 ./output.at:339: ls "@{.c" "@{.h"
-./output.at:277: cat foo.y
+126. output.at:272:  ok
 stdout:
-127. output.at:277:  ok
 @{.c
 @{.h
 ./output.at:339: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "@{.c" 
+./output.at:277: cat foo.y
+127. output.at:277:  ok
+
+
+135. output.at:340: testing Output file name: @} ...
+./output.at:340: touch "@}.tmp" || exit 77
+./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y
+136. output.at:341: testing Output file name: [ ...
+./output.at:341: touch "[.tmp" || exit 77
+./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y
 ./output.at:340: ls "@}.c" "@}.h"
 stdout:
 @}.c
 @}.h
 ./output.at:340: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "@}.c" 
-
-136. output.at:341: testing Output file name: [ ...
-./output.at:341: touch "[.tmp" || exit 77
-./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y
 ./output.at:341: ls "[.c" "[.h"
 stdout:
 [.c
 [.h
 ./output.at:341: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "[.c" 
 stderr:
-stdout:
-./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y
 stderr:
 stdout:
-./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y
-./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh"
 stdout:
-`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc
-`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh
-./output.at:328: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" 
+./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y
+./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y
 stderr:
 stdout:
 ./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y
 ./output.at:335: ls "(.cc" "(.hh"
+./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh"
+stdout:
 stdout:
 (.cc
 (.hh
+stderr:
+`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc
+`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh
 ./output.at:335: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "(.cc" 
-./output.at:336: ls ").cc" ").hh"
 stdout:
-).cc
-).hh
-./output.at:336: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c ").cc" 
+./output.at:328: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" 
+./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y
 stderr:
+./output.at:336: ls ").cc" ").hh"
 stdout:
-stderr:
-./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y
 stdout:
+).cc
+).hh
 ./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y
+./output.at:336: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c ").cc" 
 ./output.at:337: ls "#.cc" "#.hh"
 stdout:
 #.cc
@@ -4932,82 +4969,78 @@
 ./output.at:338: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "@@.cc" 
 stderr:
 stdout:
-stderr:
-./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y
-stdout:
 ./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y
 ./output.at:339: ls "@{.cc" "@{.hh"
 stdout:
 @{.cc
 @{.hh
 ./output.at:339: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "@{.cc" 
-./output.at:340: ls "@}.cc" "@}.hh"
-stdout:
-@}.cc
-@}.hh
 stderr:
-./output.at:340: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "@}.cc" 
+stderr:
 stdout:
+stdout:
+./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y
 ./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y
 ./output.at:341: ls "[.cc" "[.hh"
+./output.at:340: ls "@}.cc" "@}.hh"
+stdout:
 stdout:
+@}.cc
+@}.hh
 [.cc
 [.hh
+./output.at:340: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "@}.cc" 
 ./output.at:341: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "[.cc" 
 stderr:
 stdout:
-130. output.at:335:  ok
-
+129. output.at:328:  ok
 stderr:
 stdout:
-131. output.at:336:  ok
-137. output.at:342: testing Output file name: ] ...
-./output.at:342: touch "].tmp" || exit 77
-./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "].c" --header="].h" glr.y
-stderr:
 
+130. output.at:335:  ok
+stderr:
 stdout:
-135. output.at:340:  ok
 
+131. output.at:336: 137. output.at:342: testing Output file name: ] ...
+./output.at:342: touch "].tmp" || exit 77
+ ok
+./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "].c" --header="].h" glr.y
 138. output.at:363: testing Graph with no conflicts ...
 ./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall --graph input.y
+
 139. output.at:403: testing Graph with unsolved S/R ...
 ./output.at:403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall --graph input.y
 ./output.at:342: ls "].c" "].h"
 stdout:
+stderr:
 ].c
 ].h
 ./output.at:342: $CC $CFLAGS $CPPFLAGS  -c -o glr.o -c "].c" 
-stderr:
-input.y: warning: 3 shift/reduce conflicts [-Wconflicts-sr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-input.y:10.10-18: warning: rule useless in parser due to conflicts [-Wother]
-input.y:11.10-18: warning: rule useless in parser due to conflicts [-Wother]
-input.y:12.10-18: warning: rule useless in parser due to conflicts [-Wother]
-./output.at:403: grep -v // input.gv
-stderr:
-stdout:
-139. output.at:403:  ok
-129. output.at:328:  ok
-stderr:
 stdout:
+132. output.at:337:  ok
 stderr:
-133. output.at:338:  ok
 ./output.at:363: grep -v // input.gv
-
-
 138. output.at:363:  ok
 
+stderr:
+stdout:
+133. output.at:338: 
+ ok
 140. output.at:473: testing Graph with solved S/R ...
 ./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall --graph input.y
 stderr:
+input.y: warning: 3 shift/reduce conflicts [-Wconflicts-sr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+input.y:10.10-18: warning: rule useless in parser due to conflicts [-Wother]
+input.y:11.10-18: warning: rule useless in parser due to conflicts [-Wother]
+input.y:12.10-18: warning: rule useless in parser due to conflicts [-Wother]
+./output.at:403: grep -v // input.gv
 
-stdout:
-142. output.at:576: testing Graph with reductions with multiple LAT ...
-./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall --graph input.y
-134. output.at:339:  ok
 141. output.at:538: testing Graph with R/R ...
 ./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall --graph input.y
+139. output.at:403:  ok
+142. output.at:576: testing Graph with reductions with multiple LAT ...
+./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall --graph input.y
 
 143. output.at:641: testing Graph with a reduction rule both enabled and disabled ...
 ./output.at:641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall --graph input.y
@@ -5016,32 +5049,30 @@
 input.y:14.10-18: warning: rule useless in parser due to conflicts [-Wother]
 input.y:15.10-18: warning: rule useless in parser due to conflicts [-Wother]
 ./output.at:473: grep -v // input.gv
-144. output.at:744: testing C++ Output File Prefix Mapping ...
-./output.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o out/x1.cc -M out/=bar/ x1.yy
-140. output.at:473:  ok
 stderr:
 input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
 input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 input.y:4.3: warning: rule useless in parser due to conflicts [-Wother]
 ./output.at:538: grep -v // input.gv
+140. output.at:473:  ok
 141. output.at:538:  ok
 
+
 stderr:
 input.y: warning: 3 reduce/reduce conflicts [-Wconflicts-rr]
 input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 input.y:2.14-18: warning: rule useless in parser due to conflicts [-Wother]
 input.y:5.3: warning: rule useless in parser due to conflicts [-Wother]
 ./output.at:576: grep -v // input.gv
-./output.at:775: $CXX $CPPFLAGS  $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc 
-145. diagnostics.at:84: testing Warnings ...
-
 142. output.at:576:  ok
-146. diagnostics.at:133: testing Single point locations ...
+144. output.at:744: testing C++ Output File Prefix Mapping ...
+./output.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o out/x1.cc -M out/=bar/ x1.yy
+145. diagnostics.at:84: testing Warnings ...
 ./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
 
+146. diagnostics.at:133: testing Single point locations ...
 ./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-147. diagnostics.at:182: testing Line is too short, and then you die ...
-./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
+./output.at:775: $CXX $CPPFLAGS  $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc 
 ./diagnostics.at:84: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
@@ -5050,22 +5081,12 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:182: "$PERL" -pi -e '
-  s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
-  if (/Example/)
-    {
-      ++$example;
-      $_ = "" if $example % 2 == 0;
-    }
-' experr || exit 77
-./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-147. diagnostics.at:182:  ok
 stderr:
 stdout:
+134. output.at:339: ./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+ ok
 
-132. output.at:337:  ok
-148. diagnostics.at:217: testing Zero-width characters ...
+147. diagnostics.at:182: testing Line is too short, and then you die ...
 ./diagnostics.at:133: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
@@ -5074,14 +5095,10 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-
 ./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
 145. diagnostics.at:84:  ok
-./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77
-./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-
-149. diagnostics.at:235: testing Tabulations and multibyte characters ...
-./diagnostics.at:217: "$PERL" -pi -e '
+./diagnostics.at:182: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5089,21 +5106,29 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-150. diagnostics.at:282: testing Tabulations and multibyte characters ...
-148. diagnostics.at:217:  ok
-146. diagnostics.at:133:  ok
-./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
+./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
 stderr:
 stdout:
+stderr:
 
+stdout:
 136. output.at:341:  ok
+135. output.at:340:  ok
+147. diagnostics.at:182:  ok
+148. diagnostics.at:217: testing Zero-width characters ...
+
+
+
+./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77
+146. diagnostics.at:133:  ok
+150. diagnostics.at:282: testing Tabulations and multibyte characters ...
 151. diagnostics.at:303: testing Special files ...
+149. diagnostics.at:235: testing Tabulations and multibyte characters ...
+./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
+./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
 
 ./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-
-./diagnostics.at:235: "$PERL" -pi -e '
+./diagnostics.at:217: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5111,9 +5136,21 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
+./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
+./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+148. diagnostics.at:217:  ok
 152. diagnostics.at:328: testing Complaints from M4 ...
-./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+
+./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
 153. diagnostics.at:351: testing Carriage return ...
+./diagnostics.at:303: "$PERL" -pi -e '
+  s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
+  if (/Example/)
+    {
+      ++$example;
+      $_ = "" if $example % 2 == 0;
+    }
+' experr || exit 77
 ./diagnostics.at:282: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
@@ -5122,11 +5159,7 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77
-./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-./diagnostics.at:351: "$PERL" -pi -e '
+./diagnostics.at:235: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5134,8 +5167,12 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-./diagnostics.at:328: "$PERL" -pi -e '
+./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77
+./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
+./diagnostics.at:351: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5143,9 +5180,8 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-150. diagnostics.at:282:  ok
-149. diagnostics.at:235:  ok
-./diagnostics.at:303: "$PERL" -pi -e '
+./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+./diagnostics.at:328: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5155,32 +5191,33 @@
 ' experr || exit 77
 153. diagnostics.at:351:  ok
 ./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-
 
-
-154. diagnostics.at:372: testing CR NL ...
-156. diagnostics.at:432: testing Screen width: 80 columns ...
-155. diagnostics.at:399: testing Screen width: 200 columns ...
+150. diagnostics.at:282:  ok
+151. diagnostics.at:303:  ok
 stderr:
 input.y: warning: 4 shift/reduce conflicts [-Wconflicts-sr]
 input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 ./output.at:641: grep -v // input.gv
+
+
+154. diagnostics.at:372: testing CR NL ...
+149. diagnostics.at:235:  ok
 143. output.at:641:  ok
-151. diagnostics.at:303:  ok
 ./diagnostics.at:372: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77
-./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y
-./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y
-152. diagnostics.at:328:  ok
-./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
 
+155. diagnostics.at:399: testing Screen width: 200 columns ...
 
+./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
+156. diagnostics.at:432: testing Screen width: 80 columns ...
+./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y
+./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y
 157. diagnostics.at:465: testing Screen width: 60 columns ...
-
+152. diagnostics.at:328:  ok
 158. diagnostics.at:504: testing Suggestions ...
-159. diagnostics.at:527: testing Counterexamples ...
 ./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y
-./diagnostics.at:432: "$PERL" -pi -e '
+./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
+
+./diagnostics.at:504: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5188,6 +5225,8 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
+./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+159. diagnostics.at:527: testing Counterexamples ...
 ./diagnostics.at:372: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
@@ -5196,7 +5235,10 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:399: "$PERL" -pi -e '
+158. diagnostics.at:504:  ok
+./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
+./diagnostics.at:432: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5204,12 +5246,7 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y
-./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y
-./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-./diagnostics.at:504: "$PERL" -pi -e '
+./diagnostics.at:399: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5217,7 +5254,9 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+
+./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y
+./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y
 ./diagnostics.at:465: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
@@ -5226,16 +5265,11 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-158. diagnostics.at:504:  ok
+160. diagnostics.at:645: testing Deep Counterexamples ...
 ./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale" COLUMNS=60 bison -fcaret -Wall,cex input.y
+./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
 154. diagnostics.at:372:  ok
-156. diagnostics.at:432:  ok
-
-155. diagnostics.at:399:  ok
-
-
-160. diagnostics.at:645: testing Deep Counterexamples ...
-./diagnostics.at:527: "$PERL" -pi -e '
+./diagnostics.at:645: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5243,19 +5277,12 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-162. skeletons.at:25: testing Relative skeleton file names ...
-./skeletons.at:27: mkdir tmp
-
-161. diagnostics.at:713: testing Indentation with message suppression ...
-./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wno-other input.y
-157. diagnostics.at:465:  ok
-./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret --color=debug -Wall,cex input.y
-./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret tmp/input-gram.y
-163. skeletons.at:85: testing Installed skeleton file names ...
+./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
 
-./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y
-./diagnostics.at:645: "$PERL" -pi -e '
+155. diagnostics.at:399:  ok
+160. diagnostics.at:645:  ok
+156. diagnostics.at:432:  ok
+./diagnostics.at:527: "$PERL" -pi -e '
   s{(</?(-|\w)+>)}{ $1 eq "<tag>" ? $1 : "" }ge;
   if (/Example/)
     {
@@ -5263,21 +5290,38 @@
       $_ = "" if $example % 2 == 0;
     }
 ' experr || exit 77
-./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
-./skeletons.at:64: cat input-gram.tab.c
+161. diagnostics.at:713: testing Indentation with message suppression ...
+./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wno-other input.y
+./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  LC_ALL="$locale"  bison -fcaret -Wall,cex input.y
+
+
+157. diagnostics.at:465:  ok
+
+
+163. skeletons.at:85: testing Installed skeleton file names ...
 164. skeletons.at:142: testing Boolean=variables: invalid skeleton defaults ...
-./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror
-160. diagnostics.at:645:  ok
-./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input-gram.y
+162. skeletons.at:25: testing Relative skeleton file names ...
+./skeletons.at:27: mkdir tmp
 ./skeletons.at:155: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y
+./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret tmp/input-gram.y
+165. skeletons.at:166: testing Complaining during macro argument expansion ...
+./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input1.y
+./skeletons.at:64: cat input-gram.tab.c
+164. skeletons.at:142: ./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror
+ ok
+./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input-gram.y
+
 ./skeletons.at:121: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS
 ./skeletons.at:69: cat input-gram.tab.c
-
 159. diagnostics.at:527:  ok
-164. skeletons.at:142:  ok
 ./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y
+./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input2.y
+166. skeletons.at:248: testing Fatal errors make M4 exit immediately ...
+./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input1.y
+
 stderr:
-165. skeletons.at:166: testing Complaining during macro argument expansion ...
+./skeletons.at:74: cat input-cmd-line.tab.c
 input.y:2.1-12: error: deprecated directive: '%pure-parser', use '%define api.pure' [-Werror=deprecated]
     2 | %pure-parser
       | ^~~~~~~~~~~~
@@ -5286,28 +5330,30 @@
     3 | %error-verbose
       | ^~~~~~~~~~~~~~
       | %define parse.error verbose
-./skeletons.at:74: cat input-cmd-line.tab.c
-./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input1.y
-
+167. skeletons.at:302: testing Fatal errors but M4 continues producing output ...
+./skeletons.at:314: "$PERL" gen-skel.pl > skel.c || exit 77
 162. skeletons.at:25:  ok
-
 ./diagnostics.at:725: sed 's,.*/$,,' stderr 1>&2
+./skeletons.at:223: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input3.y
 ./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=error
+./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input2.y
+./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 
-166. skeletons.at:248: testing Fatal errors make M4 exit immediately ...
-./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input1.y
-167. skeletons.at:302: testing Fatal errors but M4 continues producing output ...
-./skeletons.at:314: "$PERL" gen-skel.pl > skel.c || exit 77
 168. sets.at:27: testing Nullable ...
-./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 ./sets.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=sets input.y
+./skeletons.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input4.y
 167. skeletons.at:302:  ok
-./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input2.y
+166. skeletons.at:248:  ok
 
-./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input2.y
+
+165. skeletons.at:166:  ok
 ./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none
-stderr:
 169. sets.at:111: testing Broken Closure ...
+./sets.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=sets input.y
+170. sets.at:153: testing Firsts ...
+./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=sets input.y
+
+stderr:
 bison (GNU Bison) 3.8.2
 RITEM
   0:   e  $end  (rule 0)
@@ -5380,17 +5426,13 @@
     rule 0:
 
 ./sets.at:43: sed -f extract.sed stderr
-./sets.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=sets input.y
-./skeletons.at:223: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input3.y
+171. sets.at:228: testing Accept ...
+./sets.at:240: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
 168. sets.at:27:  ok
-166. skeletons.at:248:  ok
-./skeletons.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input4.y
 
 ./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=none -Werror --trace=none
-
-170. sets.at:153: testing Firsts ...
-./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=sets input.y
-165. skeletons.at:166:  ok
+172. sets.at:269: testing Build relations ...
+./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
 stderr:
 bison (GNU Bison) 3.8.2
 RITEM
@@ -5633,15 +5675,6 @@
     rule 0:
 
 ./sets.at:127: sed -n 's/[   ]*$//;/^RTC: Firsts Output BEGIN/,/^RTC: Firsts Output END/p' stderr
-171. sets.at:228: testing Accept ...
-./sets.at:240: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
-
-169. sets.at:111:  ok
-161. diagnostics.at:713:  ok
-172. sets.at:269: testing Build relations ...
-
-./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-
 stderr:
 bison (GNU Bison) 3.8.2
 RITEM
@@ -5764,14 +5797,12 @@
     rule 6: $end '<' '>' '+' '-' '^' '='
 
 ./sets.at:172: sed -f extract.sed stderr
-173. sets.at:315: testing Reduced Grammar ...
-./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y
+169. sets.at:111:  ok
+170. sets.at:153:  ok
 ./sets.at:243: sed -n 's/.*define YYFINAL *\([0-9][0-9]*\)/final state \1/p' input.c
 stdout:
-170. sets.at:153:  ok
 final state 6
-174. sets.at:394: testing Reduced Grammar with prec and assoc ...
-./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y
+
 ./sets.at:248: sed -n '
            /^State \(.*\)/{
              s//final state \1/
@@ -5783,16 +5814,22 @@
              q
            }
         ' input.output
-./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+
 171. sets.at:228:  ok
+161. diagnostics.at:713:  ok
+173. sets.at:315: testing Reduced Grammar ...
+./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y
 
+174. sets.at:394: testing Reduced Grammar with prec and assoc ...
+./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y
+./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
 
 175. reduce.at:26: testing Useless Terminals ...
 ./reduce.at:47: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
 176. reduce.at:70: testing Useless Nonterminals ...
 ./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input.y
-./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror
 stderr:
+./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror
 input.y: error: 5 reduce/reduce conflicts [-Werror=conflicts-rr]
 input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 input.y:2.14-17: error: rule useless in parser due to conflicts [-Werror=other]
@@ -5813,9 +5850,14 @@
 174. sets.at:394:  ok
 ./sets.at:286: sed 's,.*/$,,' stderr 1>&2
 
-stderr:
-./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output
 ./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
+./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
+./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output
+177. reduce.at:120: testing Useless Rules ...
+./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
+175. reduce.at:26:  ok
+
+stderr:
 bison (GNU Bison) 3.8.2
 input.y: error: 1 nonterminal useless in grammar [-Werror=other]
 input.y: error: 1 rule useless in grammar [-Werror=other]
@@ -5873,27 +5915,27 @@
 
 
 reduced input.y defines 7 terminals, 4 nonterminals, and 6 productions.
-177. reduce.at:120: testing Useless Rules ...
-./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-175. reduce.at:26:  ok
-./sets.at:325: sed 's,.*/$,,' stderr 1>&2
-./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Werror
-
-./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error
-178. reduce.at:224: testing Useless Parts ...
-./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y
-./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
-./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
 stderr:
 input.y: error: 3 nonterminals useless in grammar [-Werror=other]
 input.y: error: 3 rules useless in grammar [-Werror=other]
 input.y:11.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other]
 input.y:12.1-8: error: nonterminal useless in grammar: useless2 [-Werror=other]
 input.y:13.1-8: error: nonterminal useless in grammar: useless3 [-Werror=other]
-./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none
+178. reduce.at:224: testing Useless Parts ...
+./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y
+./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
+./sets.at:325: sed 's,.*/$,,' stderr 1>&2
 ./reduce.at:89: sed 's,.*/$,,' stderr 1>&2
-./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Werror
+./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error
 ./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=error
+./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+stderr:
+./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Werror
+stdout:
+./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y
+./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
+./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none
 stderr:
 input.y: error: 9 nonterminals useless in grammar [-Werror=other]
 input.y: error: 9 rules useless in grammar [-Werror=other]
@@ -5924,44 +5966,46 @@
 input.y:18.1-8: error: nonterminal useless in grammar: useless9 [-Werror=other]
    18 | useless9: '9';
       | ^~~~~~~~
-./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+stderr:
 ./reduce.at:146: sed 's,.*/$,,' stderr 1>&2
-./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none
 stderr:
 input.y: error: 1 nonterminal useless in grammar [-Werror=other]
 input.y: error: 1 rule useless in grammar [-Werror=other]
 input.y:18.1-6: error: nonterminal useless in grammar: unused [-Werror=other]
    18 | unused
       | ^~~~~~
-./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none
-./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
-./reduce.at:261: sed 's,.*/$,,' stderr 1>&2
-./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error
-stderr:
-stdout:
+./output.at:342: ls "].cc" "].hh"
 172. sets.at:269:  ok
+stdout:
 ./skeletons.at:122:  $PREPARSER ./input-cmd-line
+stdout:
+].cc
+].hh
 stderr:
+./output.at:342: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "].cc" 
 syntax error, unexpected 'a', expecting end of file
 ./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input-gram.c input-gram.y
-173. sets.at:315:  ok
-
-./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none
+./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
+./reduce.at:261: sed 's,.*/$,,' stderr 1>&2
 ./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none
+./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input-gram.c input-gram.y
 
+./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error
+./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none
 179. reduce.at:312: testing Reduced Automaton ...
 ./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret not-reduced.y
+./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output
+./skeletons.at:127: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input-gram input-gram.c $LIBS
+./reduce.at:109: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
+./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none
 ./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
+173. sets.at:315:  ok
+./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret not-reduced.y -Werror
+
 180. reduce.at:406: testing Underivable Rules ...
 ./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.y
-./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret not-reduced.y -Werror
-./skeletons.at:127: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input-gram input-gram.c $LIBS
 ./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none
-./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output
 ./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
-./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
-./reduce.at:109: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stderr:
 not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other]
 not-reduced.y: error: 3 rules useless in grammar [-Werror=other]
@@ -5974,9 +6018,14 @@
 not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other]
    11 |    | non_productive    { /* A non productive action. */ }
       |      ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-stderr:
 ./reduce.at:341: sed 's,.*/$,,' stderr 1>&2
-stdout:
+./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error
+./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Werror
+./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output
+./reduce.at:298: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
+./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output
+./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none
+./reduce.at:213: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stderr:
 input.y: error: 2 nonterminals useless in grammar [-Werror=other]
 input.y: error: 3 rules useless in grammar [-Werror=other]
@@ -5989,73 +6038,61 @@
 input.y:5.15-25: error: rule useless in grammar [-Werror=other]
     5 | exp: useful | underivable;
       |               ^~~~~~~~~~~
-./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y
-./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output
-./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error
 ./reduce.at:420: sed 's,.*/$,,' stderr 1>&2
-./reduce.at:298: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output
 ./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=error
-./reduce.at:213: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-./output.at:342: ls "].cc" "].hh"
-./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none
+stderr:
 stdout:
-].cc
-].hh
-./output.at:342: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx.o -c "].cc" 
+176. reduce.at:70: ./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none
+ ok
+
 ./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none
-./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none
-./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+181. reduce.at:452: testing Bad start symbols ...
+./reduce.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 ./reduce.at:355: sed -n '/^Grammar/q;/^$/!p' not-reduced.output
+./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret reduced.y
+./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none
+./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 stderr:
+181. reduce.at:452:  ok
 stdout:
-stderr:
 178. reduce.at:224:  ok
-./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret reduced.y
-stdout:
-176. reduce.at:70:  ok
 
+./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c
 
 ./reduce.at:434: sed -n '/^Grammar/q;/^$/!p' input.output
+179. reduce.at:312:  ok
 180. reduce.at:406:  ok
 182. reduce.at:550: testing no lr.type: Single State Split ...
 ./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c
-181. reduce.at:452: testing Bad start symbols ...
-179. reduce.at:312:  ok
-./reduce.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-
-./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-
-./reduce.at:550: sed -n '/^State 0$/,$p' input.output
 183. reduce.at:550: testing lr.type=lalr: Single State Split ...
 ./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./reduce.at:550: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-184. reduce.at:550: testing lr.type=ielr: Single State Split ...
-./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+
+
 stderr:
-./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 stdout:
+184. reduce.at:550: testing lr.type=ielr: Single State Split ...
+./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 177. reduce.at:120:  ok
-./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-./reduce.at:550: sed -n '/^State 0$/,$p' input.output
-./reduce.at:550: sed -n '/^State 0$/,$p' input.output
-
-./reduce.at:550: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-181. reduce.at:452:  ok
-./reduce.at:550: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 185. reduce.at:550: testing lr.type=canonical-lr: Single State Split ...
 ./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 
+./reduce.at:550: sed -n '/^State 0$/,$p' input.output
 186. reduce.at:783: testing no lr.type: Lane Split ...
 ./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./reduce.at:783: sed -n '/^State 0$/,$p' input.output
 ./reduce.at:550: sed -n '/^State 0$/,$p' input.output
-./reduce.at:783: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./reduce.at:550: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./reduce.at:550: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./reduce.at:550: sed -n '/^State 0$/,$p' input.output
+./reduce.at:550: sed -n '/^State 0$/,$p' input.output
+./reduce.at:550: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./reduce.at:550: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./reduce.at:783: sed -n '/^State 0$/,$p' input.output
 stderr:
+./reduce.at:783: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stdout:
 ./skeletons.at:128:  $PREPARSER ./input-gram
 stderr:
@@ -6063,65 +6100,65 @@
 ./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 163. skeletons.at:85:  ok
 
+187. reduce.at:783: testing lr.type=lalr: Lane Split ...
+./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+./reduce.at:783: sed -n '/^State 0$/,$p' input.output
+./reduce.at:783: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./reduce.at:550:  $PREPARSER ./input
 stderr:
-187. reduce.at:783: testing lr.type=lalr: Lane Split ...
-./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 syntax error
+stderr:
 ./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./reduce.at:550:  $PREPARSER ./input
+stderr:
+syntax error
+183. reduce.at:550: ./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+ ok
 182. reduce.at:550:  ok
-
-./reduce.at:783: sed -n '/^State 0$/,$p' input.output
-188. reduce.at:783: testing lr.type=ielr: Lane Split ...
-./reduce.at:783: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 stderr:
 stdout:
+
 ./reduce.at:550:  $PREPARSER ./input
 stderr:
-syntax error
-./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./reduce.at:783: sed -n '/^State 0$/,$p' input.output
-183. reduce.at:550:  ok
-./reduce.at:783: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
+./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 ./reduce.at:550:  $PREPARSER ./input
 
 stderr:
-./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 184. reduce.at:550:  ok
+./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+185. reduce.at:550:  ok
+188. reduce.at:783: testing lr.type=ielr: Lane Split ...
+./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+
 189. reduce.at:783: testing lr.type=canonical-lr: Lane Split ...
 ./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+
 stderr:
 stdout:
-
 ./reduce.at:783:  $PREPARSER ./input
 stderr:
 syntax error
 ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 190. reduce.at:1027: testing no lr.type: Complex Lane Split ...
+./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 186. reduce.at:783:  ok
+191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ...
 ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 
-stderr:
 ./reduce.at:783: sed -n '/^State 0$/,$p' input.output
-191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ...
-stdout:
+192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ...
+./reduce.at:783: sed -n '/^State 0$/,$p' input.output
 ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./reduce.at:550:  $PREPARSER ./input
 ./reduce.at:783: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./reduce.at:783: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output
-185. reduce.at:550:  ok
 ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-
 ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output
-192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ...
-./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output
 ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
@@ -6132,198 +6169,196 @@
 syntax error
 ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 187. reduce.at:783:  ok
-stderr:
 
-stdout:
-./reduce.at:783:  $PREPARSER ./input
-stderr:
-./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 193. reduce.at:1027: testing lr.type=canonical-lr: Complex Lane Split ...
-188. reduce.at:783:  ok
-stderr:
 ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+./reduce.at:1027: sed -n '/^State 0$/,$p' input.output
+./reduce.at:1027: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
+stderr:
+stdout:
 stdout:
 ./reduce.at:783:  $PREPARSER ./input
+./reduce.at:783:  $PREPARSER ./input
+stderr:
 stderr:
 ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-189. reduce.at:783:  ok
-
-194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ...
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ...
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./reduce.at:1027: sed -n '/^State 0$/,$p' input.output
-./reduce.at:1027: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
+./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+188. reduce.at:783: 189. reduce.at:783:  ok
+ ok
 stderr:
 stdout:
 ./reduce.at:1027:  $PREPARSER ./input
 stderr:
 syntax error
+
 ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
+
 stderr:
-190. reduce.at:1027:  ok
 stdout:
+190. reduce.at:1027:  ok
 ./reduce.at:1027:  $PREPARSER ./input
 stderr:
 syntax error
 ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ...
+194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ...
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 
-191. reduce.at:1027:  ok
-196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ...
 ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-stderr:
-input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+191. reduce.at:1027:  ok
 
-./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2
-stderr:
-input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
 stderr:
-197. reduce.at:1296: testing lr.type=canonical-lr: Split During Added Lookahead Propagation ...
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 stdout:
-./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2
 ./reduce.at:1027:  $PREPARSER ./input
+196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ...
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 stderr:
-./reduce.at:1296: sed -n '/^State 0$/,$p' input.output
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
 ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 192. reduce.at:1027:  ok
-./reduce.at:1296: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./reduce.at:1296: sed -n '/^State 0$/,$p' input.output
+197. reduce.at:1296: testing lr.type=canonical-lr: Split During Added Lookahead Propagation ...
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-./reduce.at:1296: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 198. reduce.at:1627: testing no lr.default-reduction ...
 ./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-./reduce.at:1627: sed -n '/^State 0$/,$p' input.output
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
-./reduce.at:1627: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 ./reduce.at:1296: sed -n '/^State 0$/,$p' input.output
 ./reduce.at:1296: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./reduce.at:1296: sed -n '/^State 0$/,$p' input.output
 ./reduce.at:1296: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./reduce.at:1627: sed -n '/^State 0$/,$p' input.output
+./reduce.at:1627: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
-stdout:
-./reduce.at:1296:  $PREPARSER ./input
+input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 stderr:
+input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2
+./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
 stderr:
-./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 ./reduce.at:1027:  $PREPARSER ./input
-196. reduce.at:1296:  ok
 stderr:
 ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 193. reduce.at:1027:  ok
 
-
-200. reduce.at:1627: testing lr.default-reduction=consistent ...
-./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 199. reduce.at:1627: testing lr.default-reduction=most ...
 ./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
 ./reduce.at:1627: sed -n '/^State 0$/,$p' input.output
 ./reduce.at:1627: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+./reduce.at:1296: sed -n '/^State 0$/,$p' input.output
+./reduce.at:1296: sed -n '/^State 0$/,$p' input.output
+./reduce.at:1296: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./reduce.at:1296: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-./reduce.at:1627: sed -n '/^State 0$/,$p' input.output
-./reduce.at:1627:  $PREPARSER ./input
+./reduce.at:1296:  $PREPARSER ./input
 stderr:
-./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./reduce.at:1627: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-198. reduce.at:1627:  ok
-
+./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+196. reduce.at:1296:  ok
 stderr:
 stderr:
-201. reduce.at:1627: testing lr.default-reduction=accepting ...
-./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+
 stdout:
 stdout:
 ./reduce.at:1296:  $PREPARSER ./input
-./reduce.at:1296:  $PREPARSER ./input
+./reduce.at:1627:  $PREPARSER ./input
 stderr:
 stderr:
-syntax error
-./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-197. reduce.at:1296:  ok
-194. reduce.at:1296:  ok
+./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+197. reduce.at:1296: 198. reduce.at:1627:  ok
+ ok
+200. reduce.at:1627: testing lr.default-reduction=consistent ...
+./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 
 stderr:
+
 stdout:
 ./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh
-
 ./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh
-203. report.at:3123: testing Reports with conflicts ...
 ./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy
+201. reduce.at:1627: testing lr.default-reduction=accepting ...
+./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 202. report.at:37: testing Reports ...
-./reduce.at:1627: sed -n '/^State 0$/,$p' input.output
 202. report.at:37:  skipped (report.at:75)
-./reduce.at:1627: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-203. report.at:3123:  skipped (report.at:3132)
-
 
+./reduce.at:1627: sed -n '/^State 0$/,$p' input.output
+./reduce.at:1627: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+203. report.at:3123: testing Reports with conflicts ...
 ./output.at:806: $CXX $CPPFLAGS  $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc 
+203. report.at:3123: ./reduce.at:1627: sed -n '/^State 0$/,$p' input.output
+ skipped (report.at:3132)
+
+./reduce.at:1627: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 204. conflicts.at:28: testing Token declaration order ...
 ./conflicts.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+stderr:
+stdout:
+./reduce.at:1627:  $PREPARSER ./input
+stderr:
+./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+199. reduce.at:1627:  ok
+
 205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ...
 ./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all -o input.c input.y
 stderr:
+stdout:
+./conflicts.at:82: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
+./reduce.at:1296:  $PREPARSER ./input
 stdout:
+stderr:
+syntax error
 ./reduce.at:1296:  $PREPARSER ./input
+./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p'
 syntax error
 ./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-205. conflicts.at:101:  ok
-./conflicts.at:82: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-
 195. reduce.at:1296:  ok
+194. reduce.at:1296:  ok
+
 
 206. conflicts.at:183: testing Useless associativity warning ...
 ./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wprecedence input.y
 207. conflicts.at:218: testing Useless precedence warning ...
 ./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y
 stderr:
-stdout:
-./reduce.at:1627:  $PREPARSER ./input
-stderr:
-stderr:
-./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-200. reduce.at:1627:  ok
-stdout:
-./reduce.at:1627:  $PREPARSER ./input
-stderr:
-./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence input.y -Werror
-./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-199. reduce.at:1627:  ok
-
+./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p'
+205. conflicts.at:101:  ok
 
 208. conflicts.at:275: testing S/R in initial ...
 ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence input.y -Werror
 ./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Werror
-209. conflicts.at:301: testing %nonassoc and eof ...
-./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
 stderr:
 stdout:
-./conflicts.at:84:  $PREPARSER ./input
+./reduce.at:1627:  $PREPARSER ./input
+./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
 stderr:
-./conflicts.at:84: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./conflicts.at:368: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-204. conflicts.at:28:  ok
+./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+200. reduce.at:1627:  ok
 stderr:
+stdout:
+./conflicts.at:84:  $PREPARSER ./input
 stderr:
 input.y:2.1-9: error: useless precedence and associativity for "=" [-Werror=precedence]
 input.y:4.1-5: error: useless associativity for "*", use %precedence [-Werror=precedence]
 input.y:5.1-11: error: useless precedence for "(" [-Werror=precedence]
+stderr:
+./conflicts.at:84: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+204. conflicts.at:28:  ok
+./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2
+stderr:
 input.y:7.1-9: error: useless precedence and associativity for U [-Werror=precedence]
     7 | %nonassoc U
       | ^~~~~~~~~
@@ -6336,87 +6371,94 @@
 input.y:2.1-11: error: useless precedence for Z [-Werror=precedence]
     2 | %precedence Z
       | ^~~~~~~~~~~
+./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence input.y --warnings=error
 
-./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2
 ./conflicts.at:248: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence input.y --warnings=error
-./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error
 stderr:
-input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other]
-210. conflicts.at:509: testing parse.error=verbose and consistent errors: lr.type=ielr ...
-./conflicts.at:509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-stderr:
-./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2
 stdout:
 ./reduce.at:1627:  $PREPARSER ./input
+./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error
+209. conflicts.at:301: testing %nonassoc and eof ...
+./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 stderr:
-./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
+stderr:
+input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other]
 ./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 201. reduce.at:1627:  ok
+./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2
+210. conflicts.at:509: testing parse.error=verbose and consistent errors: lr.type=ielr ...
+./conflicts.at:509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
 
-./conflicts.at:509: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none
 211. conflicts.at:513: testing parse.error=verbose and consistent errors: lr.type=ielr %glr-parser ...
-./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none
 ./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none
+./conflicts.at:368: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./conflicts.at:509: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none
 ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
 ./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none
 ./conflicts.at:513: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none
 ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none
 206. conflicts.at:183:  ok
-
 ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o input.c input.y
-207. conflicts.at:218:  ok
+
 212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ...
 ./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+207. conflicts.at:218:  ok
 
-./conflicts.at:518: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Werror
+137. output.at:342:  ok
 213. conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ...
 ./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.java input.y
-./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Werror
-213. conflicts.at:523:  skipped (conflicts.at:523)
+
+./conflicts.at:518: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ...
+./conflicts.at:530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 stderr:
 input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other]
     4 | e: 'e' | %empty;
       |          ^~~~~~
-
 ./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error
-214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ...
-./conflicts.at:530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+213. conflicts.at:523: ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error
+ skipped (conflicts.at:523)
+
 ./conflicts.at:530: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+215. conflicts.at:535: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=accepting ...
+./conflicts.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none
+./conflicts.at:535: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none
 208. conflicts.at:275:  ok
 
+216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ...
+./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./conflicts.at:540: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-137. output.at:342:  ok
-215. conflicts.at:535: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=accepting ...
-./conflicts.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./conflicts.at:509:  $PREPARSER ./input
+stderr:
+syntax error, unexpected end of file
+./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+210. conflicts.at:509:  ok
 
 stderr:
-216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ...
-./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 stdout:
 ./conflicts.at:368:  $PREPARSER ./input '0<0'
 stderr:
-./conflicts.at:535: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:368:  $PREPARSER ./input '0<0<0'
 stderr:
 syntax error, unexpected '<'
-stderr:
 ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./conflicts.at:509:  $PREPARSER ./input
+217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ...
+./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./conflicts.at:368:  $PREPARSER ./input '0>0'
 stderr:
-stderr:
-syntax error, unexpected end of file
 ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:368:  $PREPARSER ./input '0>0>0'
 stderr:
 syntax error, unexpected '>'
@@ -6424,13 +6466,8 @@
 ./conflicts.at:368:  $PREPARSER ./input '0<0>0'
 stderr:
 syntax error, unexpected '>'
-210. conflicts.at:509: ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
- ok
+./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dlr.default-reduction=consistent -o input.c input.y
-
-./conflicts.at:540: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ...
-./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./conflicts.at:546: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./conflicts.at:372: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
@@ -6441,12 +6478,11 @@
 ./conflicts.at:530: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 214. conflicts.at:530:  ok
 
-218. conflicts.at:551: testing parse.error=verbose and consistent errors: lr.type=ielr parse.lac=full ...
-./conflicts.at:551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./conflicts.at:551: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./conflicts.at:535:  $PREPARSER ./input
+218. conflicts.at:551: testing parse.error=verbose and consistent errors: lr.type=ielr parse.lac=full ...
+./conflicts.at:551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 stderr:
 syntax error, unexpected end of file, expecting 'a' or 'b'
 ./conflicts.at:535: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -6454,6 +6490,7 @@
 
 219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ...
 ./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./conflicts.at:551: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./conflicts.at:558: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
@@ -6461,14 +6498,17 @@
 stderr:
 syntax error, unexpected end of file, expecting 'a' or 'b'
 ./conflicts.at:540: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+216. conflicts.at:540:  ok
+
+220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ...
+./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./conflicts.at:564: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-216. conflicts.at:540:  ok
 ./conflicts.at:372:  $PREPARSER ./input '0<0'
 stderr:
 ./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:372:  $PREPARSER ./input '0<0<0'
-
 stderr:
 syntax error, unexpected '<', expecting end of file
 ./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -6477,8 +6517,6 @@
 ./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:372:  $PREPARSER ./input '0>0>0'
 stderr:
-220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ...
-./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 syntax error, unexpected '>', expecting end of file
 ./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:372:  $PREPARSER ./input '0<0>0'
@@ -6486,8 +6524,6 @@
 syntax error, unexpected '>', expecting end of file
 ./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y
-./conflicts.at:564: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./conflicts.at:381: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./conflicts.at:546:  $PREPARSER ./input
@@ -6496,6 +6532,7 @@
 ./conflicts.at:546: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 217. conflicts.at:546:  ok
 
+./conflicts.at:381: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 221. conflicts.at:622: testing parse.error=verbose and consistent errors:  ...
 ./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./conflicts.at:622: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
@@ -6506,9 +6543,21 @@
 syntax error, unexpected end of file, expecting 'b'
 ./conflicts.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 218. conflicts.at:551:  ok
+stderr:
+stdout:
+./conflicts.at:513:  $PREPARSER ./input
 
+stderr:
+syntax error, unexpected end of file
+./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+211. conflicts.at:513:  ok
 222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ...
 ./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+
+223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ...
+./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./conflicts.at:626: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./conflicts.at:632: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./conflicts.at:381:  $PREPARSER ./input '0<0'
@@ -6518,7 +6567,6 @@
 stderr:
 syntax error, unexpected '<', expecting end of file
 ./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./conflicts.at:626: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./conflicts.at:381:  $PREPARSER ./input '0>0'
 stderr:
 ./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -6530,19 +6578,8 @@
 stderr:
 syntax error, unexpected '>', expecting end of file
 ./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y
-stdout:
-./conflicts.at:513:  $PREPARSER ./input
-stderr:
-syntax error, unexpected end of file
-./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-211. conflicts.at:513:  ok
-
 ./conflicts.at:388: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ...
-./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./conflicts.at:632: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./conflicts.at:622:  $PREPARSER ./input
@@ -6559,6 +6596,17 @@
 ./output.at:835: $CXX $CPPFLAGS  $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS
 stderr:
 stdout:
+./conflicts.at:632:  $PREPARSER ./input
+stderr:
+syntax error, unexpected 'b'
+./conflicts.at:632: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+223. conflicts.at:632:  ok
+
+225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ...
+./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./conflicts.at:642: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
+stdout:
 ./conflicts.at:388:  $PREPARSER ./input '0<0'
 stderr:
 ./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -6566,35 +6614,23 @@
 stderr:
 syntax error, unexpected '<', expecting end of file
 ./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./conflicts.at:388:  $PREPARSER ./input '0>0'
-stdout:
 stderr:
 ./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./conflicts.at:632:  $PREPARSER ./input
-stderr:
 ./conflicts.at:388:  $PREPARSER ./input '0>0>0'
-syntax error, unexpected 'b'
-./conflicts.at:632: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error, unexpected '>', expecting end of file
 ./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-223. conflicts.at:632:  ok
 ./conflicts.at:388:  $PREPARSER ./input '0<0>0'
 stderr:
 syntax error, unexpected '>', expecting end of file
 ./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
 209. conflicts.at:301:  ok
-225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ...
-./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 
-226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ...
-./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./conflicts.at:642: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./conflicts.at:647: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
+226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ...
+./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./conflicts.at:638:  $PREPARSER ./input
 stderr:
 syntax error, unexpected end of file, expecting 'a'
@@ -6603,288 +6639,303 @@
 
 227. conflicts.at:651: testing parse.error=verbose and consistent errors: parse.lac=full lr.default-reduction=accepting ...
 ./conflicts.at:651: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./conflicts.at:647: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./conflicts.at:651: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./conflicts.at:642:  $PREPARSER ./input
 stderr:
-stderr:
-stdout:
-./conflicts.at:518:  $PREPARSER ./input
 syntax error, unexpected end of file, expecting 'a'
 ./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected end of file
-./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 225. conflicts.at:642:  ok
-212. conflicts.at:518:  ok
-
 
-229. conflicts.at:764: testing Unresolved SR Conflicts ...
-./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c --report=all input.y
 228. conflicts.at:676: testing LAC: %nonassoc requires splitting canonical LR states ...
 ./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y
 stderr:
 stdout:
-./conflicts.at:626:  $PREPARSER ./input
+./conflicts.at:518:  $PREPARSER ./input
 stderr:
-syntax error, unexpected 'b'
-./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected end of file
+./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+212. conflicts.at:518:  ok
+
+229. conflicts.at:764: testing Unresolved SR Conflicts ...
 ./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror
+./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c --report=all input.y
 ./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y -Werror
-222. conflicts.at:626:  ok
-
-230. conflicts.at:887: testing Resolved SR Conflicts ...
-./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c --report=all input.y
-stderr:
 stderr:
 input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr]
 input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2
+./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error
+stderr:
 input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr]
 input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 stderr:
-stdout:
 ./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:647:  $PREPARSER ./input
+stdout:
+./conflicts.at:626:  $PREPARSER ./input
 stderr:
-./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2
 syntax error, unexpected 'b'
-./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error
-./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error
-226. conflicts.at:647:  ok
+222. conflicts.at:626:  ok
+./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none
+
+230. conflicts.at:887: testing Resolved SR Conflicts ...
+./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c --report=all input.y
+./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none
+./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none
 ./conflicts.at:901: cat input.output
 230. conflicts.at:887:  ok
+./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none
 
-
+./conflicts.at:731: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 231. conflicts.at:989: testing %precedence suffices ...
 ./conflicts.at:1006: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none
-232. conflicts.at:1015: testing %precedence does not suffice ...
-./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none
+./conflicts.at:780: cat input.output
+229. conflicts.at:764:  ok
+
 231. conflicts.at:989:  ok
-./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none
-./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none
 
-./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
-./conflicts.at:731: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+232. conflicts.at:1015: testing %precedence does not suffice ...
+./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 233. conflicts.at:1096: testing Syntax error in consistent error state: yacc.c ...
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./conflicts.at:780: cat input.output
 stderr:
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
-229. conflicts.at:764:  ok
-input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-input.y:12.3-18: error: rule useless in parser due to conflicts [-Werror=other]
-./conflicts.at:1033: sed 's,.*/$,,' stderr 1>&2
+stdout:
+./conflicts.at:647:  $PREPARSER ./input
+stderr:
+syntax error, unexpected 'b'
+./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+226. conflicts.at:647:  ok
+
+./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
 stderr:
 stdout:
 ./conflicts.at:651:  $PREPARSER ./input
 stderr:
-./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
 syntax error, unexpected end of file
 ./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-stderr:
-input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other]
-input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other]
-227. conflicts.at:651:  ok
 234. conflicts.at:1096: testing Syntax error in consistent error state: glr.c ...
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
-./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
-
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
+227. conflicts.at:651:  ok
+
+stderr:
+input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+input.y:12.3-18: error: rule useless in parser due to conflicts [-Werror=other]
 235. conflicts.at:1096: testing Syntax error in consistent error state: lalr1.cc ...
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.y
-./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
+./conflicts.at:1033: sed 's,.*/$,,' stderr 1>&2
 stderr:
 input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other]
 input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other]
+./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
 ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Werror
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
-232. conflicts.at:1015:  ok
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Werror
 stderr:
+./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
 input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other]
 input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other]
-
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
 ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2
-236. conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ...
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.y
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=error
-./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Werror
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none
-./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
 stderr:
 input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other]
 input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other]
 ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:1096: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=error
+./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none
-stderr:
-stdout:
-./output.at:836:  $PREPARSER ./parser
-stderr:
-./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-144. output.at:744:  ok
+232. conflicts.at:1015:  ok
+./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
 
-237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ...
+236. conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ...
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.y
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none
+./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./conflicts.at:1096: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Werror
 stderr:
 stdout:
 ./conflicts.at:732:  $PREPARSER ./input
 stderr:
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Werror
 syntax error, unexpected 'a', expecting 'b'
 ./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \
                  -o input.c input.y
-./conflicts.at:1096: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./output.at:836:  $PREPARSER ./parser
+stderr:
+./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+144. output.at:744:  ok
+
 stderr:
 input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other]
 input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other]
+237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ...
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.y
 ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2
+./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \
+                 -o input.c input.y -Werror
 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=error
 stderr:
-stdout:
+input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Werror
+./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2
 ./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \
-                 -o input.c input.y -Werror
+                 -o input.c input.y --warnings=error
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none
+stderr:
+input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other]
+input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other]
+./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2
+./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \
+                 -o input.c input.y -Wnone,none -Werror --trace=none
+stderr:
+stdout:
 ./conflicts.at:558:  $PREPARSER ./input
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=error
 stderr:
 syntax error, unexpected end of file, expecting 'b'
 ./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 219. conflicts.at:558:  ok
-stderr:
-stdout:
-stderr:
-./conflicts.at:564:  $PREPARSER ./input
-input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-stderr:
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none
 
-syntax error, unexpected end of file, expecting 'b'
-./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2
-220. conflicts.at:564:  ok
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none
 238. conflicts.at:1127: testing Defaulted Conflicted Reduction ...
 ./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c --report=all input.y
 ./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \
-                 -o input.c input.y --warnings=error
-
-239. conflicts.at:1264: testing %expect not enough ...
-./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
-./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none
-239. conflicts.at:1264:  ok
-./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \
-                 -o input.c input.y -Wnone,none -Werror --trace=none
+                 -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none
+./conflicts.at:1096: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y -Werror
-
-240. conflicts.at:1284: testing %expect right ...
-./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./conflicts.at:742: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none
+stderr:
+input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other]
+./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2
+./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error
 stderr:
 stdout:
 ./conflicts.at:1096:  $PREPARSER ./input
-./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 syntax error
 ./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other]
-./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \
-                 -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 233. conflicts.at:1096:  ok
-240. conflicts.at:1284:  ok
-./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error
+
+./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none
+stderr:
+239. conflicts.at:1264: testing %expect not enough ...
+stdout:
+./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
+./conflicts.at:564:  $PREPARSER ./input
+stderr:
+syntax error, unexpected end of file, expecting 'b'
+./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+239. conflicts.at:1264:  ok
+220. conflicts.at:564:  ok
 
 
-242. conflicts.at:1321: testing %expect with reduce conflicts ...
-./conflicts.at:1330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
-242. conflicts.at:1321:  ok
+./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none
+240. conflicts.at:1284: testing %expect right ...
+./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 241. conflicts.at:1301: testing %expect too much ...
 ./conflicts.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
 241. conflicts.at:1301:  ok
-./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none
 
-./conflicts.at:742: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./conflicts.at:1145: cat input.output
+238. conflicts.at:1127:  ok
+240. conflicts.at:1284:  ok
+242. conflicts.at:1321: testing %expect with reduce conflicts ...
+./conflicts.at:1330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
+242. conflicts.at:1321:  ok
+
+
 
 243. conflicts.at:1341: testing %expect in grammar rule not enough ...
 ./conflicts.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
 244. conflicts.at:1360: testing %expect in grammar rule right ...
-./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none
 ./conflicts.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-243. conflicts.at:1341:  ok
-
-244. conflicts.at:1360:  ok
-./conflicts.at:1145: cat input.output
-238. conflicts.at:1127:  ok
-
 245. conflicts.at:1377: testing %expect in grammar rules ...
 ./conflicts.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c -rall input.y
+243. conflicts.at:1341:  ok
 
 246. conflicts.at:1396: testing %expect in grammar rule too much ...
 ./conflicts.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y
-247. conflicts.at:1415: testing %expect-rr in grammar rule ...
-./conflicts.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+244. conflicts.at:1360:  ok
 246. conflicts.at:1396:  ok
 
-245. conflicts.at:1377:  ok
-247. conflicts.at:1415:  ok
 
+245. conflicts.at:1377:  ok
+247. conflicts.at:1415: testing %expect-rr in grammar rule ...
+./conflicts.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 248. conflicts.at:1440: testing %expect-rr too much in grammar rule ...
 ./conflicts.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y
+
 248. conflicts.at:1440:  ok
-249. conflicts.at:1469: testing %expect-rr not enough in grammar rule ...
 
+249. conflicts.at:1469: testing %expect-rr not enough in grammar rule ...
 ./conflicts.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y
-
 249. conflicts.at:1469:  ok
 250. conflicts.at:1498: testing %prec with user string ...
 ./conflicts.at:1507: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 
+247. conflicts.at:1415:  ok
+
 251. conflicts.at:1515: testing %no-default-prec without %prec ...
 ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall -o input.c input.y
+250. conflicts.at:1498:  ok
 252. conflicts.at:1544: testing %no-default-prec with %prec ...
 ./conflicts.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-250. conflicts.at:1498:  ok
+
 ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y -Werror
+253. conflicts.at:1568: testing %default-prec ...
+./conflicts.at:1584: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 252. conflicts.at:1544:  ok
 
+254. conflicts.at:1592: testing Unreachable States After Conflict Resolution ...
 stderr:
-
+./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all input.y
 input.y: error: 4 shift/reduce conflicts [-Werror=conflicts-sr]
 input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 input.y:1.1-5: error: useless precedence and associativity for '+' [-Werror=precedence]
 input.y:2.1-5: error: useless precedence and associativity for '*' [-Werror=precedence]
-253. conflicts.at:1568: testing %default-prec ...
-./conflicts.at:1584: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+253. conflicts.at:1568:  ok
 ./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2
-254. conflicts.at:1592: testing Unreachable States After Conflict Resolution ...
 ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error
-./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all input.y
-253. conflicts.at:1568:  ok
-./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none
 
-./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --report=all input.y -Werror
 255. conflicts.at:1855: testing Solved conflicts report for multiple reductions in a state ...
 ./conflicts.at:1881: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all -o input.c input.y
-./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --report=all input.y -Werror
+./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none
+stderr:
+stdout:
+./conflicts.at:743:  $PREPARSER ./input
+stderr:
+input.y:7.5-7: warning: rule useless in parser due to conflicts [-Wother]
+input.y:11.5-7: warning: rule useless in parser due to conflicts [-Wother]
+input.y:17.11-26: warning: rule useless in parser due to conflicts [-Wother]
+input.y:18.11-26: warning: rule useless in parser due to conflicts [-Wother]
+input.y:19.11-26: warning: rule useless in parser due to conflicts [-Wother]
+stderr:
+./conflicts.at:1882: cat input.output | sed -n '/^State 0$/,/^State 1$/p'
+syntax error, unexpected 'a', expecting 'b' or 'c'
+./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y
+255. conflicts.at:1855:  ok
 input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr]
 input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
 input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
@@ -6896,25 +6947,24 @@
 input.y:31.5-7: error: rule useless in parser due to conflicts [-Werror=other]
 input.y:32.4: error: rule useless in parser due to conflicts [-Werror=other]
 ./conflicts.at:1638: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --report=all input.y --warnings=error
-251. conflicts.at:1515:  ok
-stderr:
-input.y:7.5-7: warning: rule useless in parser due to conflicts [-Wother]
-input.y:11.5-7: warning: rule useless in parser due to conflicts [-Wother]
-input.y:17.11-26: warning: rule useless in parser due to conflicts [-Wother]
-input.y:18.11-26: warning: rule useless in parser due to conflicts [-Wother]
-input.y:19.11-26: warning: rule useless in parser due to conflicts [-Wother]
-./conflicts.at:1882: cat input.output | sed -n '/^State 0$/,/^State 1$/p'
-
-255. conflicts.at:1855:  ok
 
+./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --report=all input.y --warnings=error
 256. conflicts.at:1935: testing %nonassoc error actions for multiple reductions in a state ...
-./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none
 ./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y
+./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror
+251. conflicts.at:1515:  ok
+./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none
+
+./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror
 257. conflicts.at:2299: testing %expect-rr non GLR ...
 ./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret 1.y
+stderr:
+input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none
-./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror
+./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2
+./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error
 ./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 1.y -Werror
 stderr:
 bison (GNU Bison) 3.8.2
@@ -7195,26 +7245,21 @@
            `-> 13: %empty .
 
 
-./conflicts.at:1651: cat input.output
 ./conflicts.at:1959: sed 's,.*/$,,' stderr 1>&2
-stderr:
+./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error
+./conflicts.at:1651: cat input.output
 ./conflicts.at:1836: cat input.y >> input-keep.y
-1.y: error: %expect-rr applies only to GLR parsers [-Werror=other]
 ./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret input-keep.y
-./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error
+stderr:
+1.y: error: %expect-rr applies only to GLR parsers [-Werror=other]
+./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none
 ./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2
 ./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 1.y --warnings=error
-stderr:
-stdout:
-./conflicts.at:743:  $PREPARSER ./input
-stderr:
-syntax error, unexpected 'a', expecting 'b' or 'c'
-./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y
+./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none
 ./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-keep.y -Werror
+./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none
 ./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none
-./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none
-./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none
+./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none
 stderr:
 input-keep.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr]
 input-keep.y: error: 2 reduce/reduce conflicts [-Werror=conflicts-rr]
@@ -7223,46 +7268,49 @@
 input-keep.y:26.16: error: rule useless in parser due to conflicts [-Werror=other]
 input-keep.y:32.5-7: error: rule useless in parser due to conflicts [-Werror=other]
 input-keep.y:33.4: error: rule useless in parser due to conflicts [-Werror=other]
-./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror
 ./conflicts.at:1838: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none
+./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none
 ./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-keep.y --warnings=error
-./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret 2.y
-stderr:
-input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./conflicts.at:753: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./conflicts.at:2239: cat input.output | sed -n '/^State 0$/,/^State 1$/p'
-./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none
 256. conflicts.at:1935:  ok
-./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error
+./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret 2.y
 
-./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 2.y -Werror
-./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none
+./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none
 258. conflicts.at:2331: testing -W versus %expect and %expect-rr ...
 ./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret sr-rr.y
-./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none
+./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 2.y -Werror
+./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none
 stderr:
+./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret sr-rr.y -Werror
 2.y: error: %expect-rr applies only to GLR parsers [-Werror=other]
 2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
 2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other]
 ./conflicts.at:2317: sed 's,.*/$,,' stderr 1>&2
-./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none
 ./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 2.y --warnings=error
 254. conflicts.at:1592:  ok
-./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret sr-rr.y -Werror
+stderr:
 
-259. counterexample.at:43: testing Unifying S/R ...
-./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+stdout:
+./conflicts.at:1096:  $PREPARSER ./input
 stderr:
-./conflicts.at:753: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr]
+syntax error
+./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+234. conflicts.at:1096: sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr]
 sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
 sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none
+ ok
+259. counterexample.at:43: testing Unifying S/R ...
+./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 ./conflicts.at:2354: sed 's,.*/$,,' stderr 1>&2
+./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none
+
 ./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret sr-rr.y --warnings=error
+260. counterexample.at:83: testing Deep Unifying S/R ...
+./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
@@ -7278,34 +7326,74 @@
 input.y:4.4: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
 ./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none
 ./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
+  Example: A . B C
+  Shift derivation
+    s
+    `-> 1: ac
+           `-> 3: A ac                C
+                    `-> 4: b
+                           `-> 5: . B
+  Reduce derivation
+    s
+    `-> 2: a          bc
+           `-> 7: A . `-> 10: B C
+input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
+  Example: A A . B B C C
+  Shift derivation
+    s
+    `-> 1: ac
+           `-> 3: A ac                                    C
+                    `-> 3: A ac                         C
+                             `-> 4: b
+                                    `-> 6: . b
+                                             `-> 5: B B
+  Reduce derivation
+    s
+    `-> 2: a                   bc
+           `-> 8: A a          `-> 9: B bc          C
+                    `-> 7: A .          `-> 10: B C
+input.y:6.4: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
+./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+257. conflicts.at:2299:  ok
+./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none
+stderr:
+input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
+input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
   Example           A . B C
   Shift derivation  s -> [ y -> [ A . B ] c -> [ C ] ]
   Reduce derivation s -> [ a -> [ A . ] x -> [ B C ] ]
 input.y:4.4: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-257. conflicts.at:2299:  ok
-259. counterexample.at:43:  ok
 
+259. counterexample.at:43:  ok
 
-./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none
 261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ...
 ./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-260. counterexample.at:83: testing Deep Unifying S/R ...
-./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-stderr:
-stdout:
-./conflicts.at:1096:  $PREPARSER ./input
 stderr:
-syntax error
-./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-234. conflicts.at:1096:  ok
+input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
+input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
+  Example           A . B C
+  Shift derivation  s -> [ ac -> [ A ac -> [ b -> [ . B ] ] C ] ]
+  Reduce derivation s -> [ a -> [ A . ] bc -> [ B C ] ]
+input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
+  Example           A A . B B C C
+  Shift derivation  s -> [ ac -> [ A ac -> [ A ac -> [ b -> [ . b -> [ B B ] ] ] C ] C ] ]
+  Reduce derivation s -> [ a -> [ A a -> [ A . ] ] bc -> [ B bc -> [ B C ] C ] ]
+input.y:6.4: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
 ./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y
+262. counterexample.at:207: testing Non-unifying Ambiguous S/R ...
+./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+260. counterexample.at:83:  ok
+
 stderr:
+263. counterexample.at:254: testing Non-unifying Unambiguous S/R ...
+./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
   Example: A . B
@@ -7336,59 +7424,9 @@
                                     `-> 4: %empty .
 input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
-
 ./counterexample.at:157: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-stderr:
-input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
-input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
-  Example: A . B C
-  Shift derivation
-    s
-    `-> 1: ac
-           `-> 3: A ac                C
-                    `-> 4: b
-                           `-> 5: . B
-  Reduce derivation
-    s
-    `-> 2: a          bc
-           `-> 7: A . `-> 10: B C
-input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
-  Example: A A . B B C C
-  Shift derivation
-    s
-    `-> 1: ac
-           `-> 3: A ac                                    C
-                    `-> 3: A ac                         C
-                             `-> 4: b
-                                    `-> 6: . b
-                                             `-> 5: B B
-  Reduce derivation
-    s
-    `-> 2: a                   bc
-           `-> 8: A a          `-> 9: B bc          C
-                    `-> 7: A .          `-> 10: B C
-input.y:6.4: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
-262. counterexample.at:207: testing Non-unifying Ambiguous S/R ...
-./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 ./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Werror
 stderr:
-input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr]
-input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
-  Example           A . B
-  Shift derivation  s -> [ A xby -> [ . B ] ]
-  Reduce derivation s -> [ ax -> [ A x -> [ . ] ] by -> [ B y -> [ ] ] ]
-input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
-  First example     A X . B Y $end
-  Shift derivation  $accept -> [ s -> [ A xby -> [ X xby -> [ . B ] Y ] ] $end ]
-  Second example    A X . B y $end
-  Reduce derivation $accept -> [ s -> [ ax -> [ A x -> [ X x -> [ . ] ] ] by -> [ B y ] ] $end ]
-input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-261. counterexample.at:144:  ok
-
-stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token C [-Wcounterexamples]
   First example: B . C $end
@@ -7409,27 +7447,6 @@
 ./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
 ./counterexample.at:220: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 stderr:
-sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
-sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-stderr:
-input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
-input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
-  Example           A . B C
-  Shift derivation  s -> [ ac -> [ A ac -> [ b -> [ . B ] ] C ] ]
-  Reduce derivation s -> [ a -> [ A . ] bc -> [ B C ] ]
-input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
-  Example           A A . B B C C
-  Shift derivation  s -> [ ac -> [ A ac -> [ A ac -> [ b -> [ . b -> [ B B ] ] ] C ] C ] ]
-  Reduce derivation s -> [ a -> [ A a -> [ A . ] ] bc -> [ B bc -> [ B C ] C ] ]
-input.y:6.4: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-263. counterexample.at:254: testing Non-unifying Unambiguous S/R ...
-./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2
-260. counterexample.at:83:  ok
-./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error
-
-stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
   First example: A . A B $end
@@ -7448,6 +7465,26 @@
                          `-> 3: x                 `-> 5: A
                                 `-> 5: A .
 ./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
+./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+stderr:
+input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr]
+input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
+  Example           A . B
+  Shift derivation  s -> [ A xby -> [ . B ] ]
+  Reduce derivation s -> [ ax -> [ A x -> [ . ] ] by -> [ B y -> [ ] ] ]
+input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
+  First example     A X . B Y $end
+  Shift derivation  $accept -> [ s -> [ A xby -> [ X xby -> [ . B ] Y ] ] $end ]
+  Second example    A X . B y $end
+  Reduce derivation $accept -> [ s -> [ ax -> [ A x -> [ X x -> [ . ] ] ] by -> [ B y ] ] $end ]
+input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+stderr:
+sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr]
+sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+261. counterexample.at:144:  ok
+./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2
+
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token C [-Wcounterexamples]
@@ -7457,14 +7494,12 @@
   Reduce derivation $accept -> [ g -> [ x -> [ b -> [ B . ] cd -> [ C D ] ] ] $end ]
 input.y:6.4: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error
+262. counterexample.at:207:  ok
 264. counterexample.at:298: testing S/R after first token ...
 ./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-262. counterexample.at:207:  ok
 
-./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none
 stderr:
-265. counterexample.at:363: testing Unifying R/R counterexample ...
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
   First example     A . A B $end
@@ -7472,8 +7507,13 @@
   Second example    A . A $end
   Reduce derivation $accept -> [ s -> [ s -> [ t -> [ x -> [ A . ] ] ] t -> [ x -> [ A ] ] ] $end ]
 ./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 263. counterexample.at:254:  ok
+265. counterexample.at:363: testing Unifying R/R counterexample ...
+./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+
+./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none
+266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ...
+./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 stderr:
 input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
@@ -7503,9 +7543,7 @@
 input.y:4.4: warning: rule useless in parser due to conflicts [-Wother]
 input.y:8.4: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
-
 ./counterexample.at:314: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none
 stderr:
 input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
 input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
@@ -7518,35 +7556,16 @@
     `-> 1: A b
              `-> 3: b .
 input.y:4.9: warning: rule useless in parser due to conflicts [-Wother]
-266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ...
 ./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
-./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 ./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none
 stderr:
-input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr]
-input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
-  Example           b . A X X Y
-  Shift derivation  a -> [ s -> [ b . xx -> [ A X X ] y -> [ Y ] ] ]
-  Reduce derivation a -> [ r -> [ b . ] t -> [ A x -> [ X ] xy -> [ X Y ] ] ]
-input.y: warning: shift/reduce conflict on token X [-Wcounterexamples]
-  First example     A X . X
-  Shift derivation  a -> [ t -> [ A xx -> [ X . X ] ] ]
-  Second example    X . X xy
-  Reduce derivation a -> [ x -> [ X . ] t -> [ X xy ] ]
-input.y:4.4: warning: rule useless in parser due to conflicts [-Wother]
-input.y:8.4: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+stdout:
+./conflicts.at:754:  $PREPARSER ./input
 stderr:
-./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y
-264. counterexample.at:298:  ok
-input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
-input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
-  Example                  A b .
-  First reduce derivation  a -> [ A b . ]
-  Second reduce derivation a -> [ A b -> [ b . ] ]
-input.y:4.9: warning: rule useless in parser due to conflicts [-Wother]
 stderr:
-./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+syntax error, unexpected 'a', expecting 'b' or 'c'
+./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr]
 input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples]
   First example: D . A $end
@@ -7563,12 +7582,38 @@
                     `-> 6: D .
 input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
-265. counterexample.at:363:  ok
 ./counterexample.at:409: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+228. conflicts.at:676:  ok
+stderr:
+
+input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
+input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
+  Example                  A b .
+  First reduce derivation  a -> [ A b . ]
+  Second reduce derivation a -> [ A b -> [ b . ] ]
+input.y:4.9: warning: rule useless in parser due to conflicts [-Wother]
+stderr:
+./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr]
+input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
+  Example           b . A X X Y
+  Shift derivation  a -> [ s -> [ b . xx -> [ A X X ] y -> [ Y ] ] ]
+  Reduce derivation a -> [ r -> [ b . ] t -> [ A x -> [ X ] xy -> [ X Y ] ] ]
+input.y: warning: shift/reduce conflict on token X [-Wcounterexamples]
+  First example     A X . X
+  Shift derivation  a -> [ t -> [ A xx -> [ X . X ] ] ]
+  Second example    X . X xy
+  Reduce derivation a -> [ x -> [ X . ] t -> [ X xy ] ]
+input.y:4.4: warning: rule useless in parser due to conflicts [-Wother]
+input.y:8.4: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+265. counterexample.at:363:  ok
+264. counterexample.at:298:  ok
+
 
 267. counterexample.at:441: testing Non-unifying R/R LR(2) conflict ...
 ./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-
+./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y
 stderr:
 input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr]
 input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples]
@@ -7577,16 +7622,16 @@
   Second example           B D . A $end
   Second reduce derivation $accept -> [ s -> [ B b -> [ D . ] A ] $end ]
 input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror
-266. counterexample.at:399:  ok
+269. counterexample.at:550: testing R/R cex with prec ...
 268. counterexample.at:488: testing Cex Search Prepend ...
+./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 ./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+266. counterexample.at:399:  ok
 
-269. counterexample.at:550: testing R/R cex with prec ...
-./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-stderr:
-sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr]
+270. counterexample.at:610: testing Null nonterminals ...
+./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror
+./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
@@ -7617,9 +7662,7 @@
                              `-> 7: A .
 input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
-./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2
 ./counterexample.at:499: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error
 stderr:
 input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr]
 input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples]
@@ -7651,6 +7694,11 @@
                            `-> 7: A               c A
                                   `-> 5: %empty .   `-> 7: %empty
 ./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
+./counterexample.at:562: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+stderr:
+sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr]
+./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2
+./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token B [-Wcounterexamples]
@@ -7663,22 +7711,12 @@
   Reduce derivation s -> [ n -> [ N n -> [ N a -> [ A . ] B ] D ] C ]
 input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-./counterexample.at:562: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none
 268. counterexample.at:488:  ok
 
+./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none
+271. counterexample.at:797: testing Non-unifying Prefix Share ...
 stderr:
-stdout:
-./conflicts.at:754:  $PREPARSER ./input
-stderr:
-syntax error, unexpected 'a', expecting 'b' or 'c'
-./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-228. conflicts.at:676:  ok
-./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none
-270. counterexample.at:610: testing Null nonterminals ...
-./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-stderr:
-
+./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr]
 input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples]
   Example                  B . b c
@@ -7691,12 +7729,9 @@
 ./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
 269. counterexample.at:550:  ok
 
-271. counterexample.at:797: testing Non-unifying Prefix Share ...
-./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 272. counterexample.at:842: testing Deep Null Unifying ...
 ./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token J [-Wcounterexamples]
@@ -7713,8 +7748,6 @@
 input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
 ./counterexample.at:810: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token D [-Wcounterexamples]
@@ -7731,7 +7764,6 @@
                              `-> 5: %empty .
 ./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
 ./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token J [-Wcounterexamples]
@@ -7740,10 +7772,16 @@
   Reduce derivation s -> [ a -> [ H i -> [ i J . ] J J ] ]
 input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother]
 ./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 271. counterexample.at:797:  ok
-stderr:
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+273. counterexample.at:884: testing Deep Null Non-unifying ...
+./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token D [-Wcounterexamples]
   Example           A a . D
@@ -7752,20 +7790,13 @@
 ./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 272. counterexample.at:842:  ok
-
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 
-273. counterexample.at:884: testing Deep Null Non-unifying ...
-./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-274. synclines.at:194: testing Prologue syncline ...
-./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+274. synclines.at:194: testing Prologue syncline ...
+./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
 input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
 input.y: warning: shift/reduce conflict on token D [-Wcounterexamples]
@@ -7784,8 +7815,23 @@
                              `-> 4: c
                                     `-> 5: %empty .
 ./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
-stderr:
 ./counterexample.at:896: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+stderr:
+input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
+input.y: warning: shift/reduce conflict on token D [-Wcounterexamples]
+  First example     A a . D $end
+  Shift derivation  $accept -> [ s -> [ A a d -> [ . D ] ] $end ]
+  Second example    A a . D E $end
+  Reduce derivation $accept -> [ s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] E ] $end ]
+./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+stderr:
 syncline.c: In function 'foo':
 syncline.c:4:2: error: #error "4"
     4 | #error "4"
@@ -7817,22 +7863,17 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+273. counterexample.at:884:  ok
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 stdout:
 syncline.c:4: #error "4"
 ./synclines.at:194: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
 ./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c input.c
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+
 ./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wnone $file
-stderr:
-input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
-input.y: warning: shift/reduce conflict on token D [-Wcounterexamples]
-  First example     A a . D $end
-  Shift derivation  $accept -> [ s -> [ A a d -> [ . D ] ] $end ]
-  Second example    A a . D E $end
-  Reduce derivation $accept -> [ s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] E ] $end ]
-./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+275. synclines.at:214: testing %union syncline ...
+./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
 input.y:2:2: error: #error "2"
     2 | #error "2"
@@ -7864,22 +7905,15 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-273. counterexample.at:884:  ok
 stdout:
 input.y:2: #error "2"
 ./synclines.at:194: cat stdout
 274. synclines.at:194:  ok
-
 ./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror $file
 
-275. synclines.at:214: testing %union syncline ...
-./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c
 276. synclines.at:237: testing %union name syncline ...
 ./synclines.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 stderr:
 syncline.c: In function 'foo':
 syncline.c:4:2: error: #error "4"
@@ -7912,12 +7946,25 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
+stdout:
+syncline.c:4: #error "4"
+./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
+./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 stderr:
-syncline.c: In function 'foo':
-syncline.c:4:2: error: #error "4"
-    4 | #error "4"
+stdout:
+./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c
+./conflicts.at:1096:  $PREPARSER ./input
+stderr:
+syntax error
+./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+stderr:
+input.y:2:2: error: #error "2"
+    2 | #error "2"
       |  ^~~~~
-./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -7944,23 +7991,20 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-stdout:
-stdout:
-syncline.c:4: #error "4"
-./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
-syncline.c:4: #error "4"
-./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
-./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+235. conflicts.at:1096:  ok
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+stdout:
+input.y:2: #error "2"
+./synclines.at:214: cat stdout
 stderr:
-input.y:2:2: error: #error "2"
-    2 | #error "2"
+
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+275. synclines.at:214: syncline.c: In function 'foo':
+syncline.c:4:2: error: #error "4"
+    4 | #error "4"
       |  ^~~~~
-./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+ ok
+./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -7988,9 +8032,19 @@
 EOF
 
 stdout:
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+syncline.c:4: #error "4"
+./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
+
+./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-input.y:2: #error "2"
-./synclines.at:214: cat stdout
+277. synclines.at:264: testing Postprologue syncline ...
+./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+278. synclines.at:291: testing Action syncline ...
+./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 stderr:
 input.y:1:7: error: expected '{' before 'break'
     1 | %union break
@@ -8122,6 +8176,7 @@
 input.c:1162:11: warning: implicit declaration of function 'yydestruct' [-Wimplicit-function-declaration]
  1162 |           yydestruct ("Error: discarding",
       |           ^~~~~~~~~~
+./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c
 ./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
@@ -8149,8 +8204,7 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-275. synclines.at:214:  ok
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 stdout:
 input.y:1: #error expected '{' before 'break'
 %union break
@@ -8280,37 +8334,15 @@
           yydestruct ("Error: discarding",
           ^~~~~~~~~~
 ./synclines.at:255: grep '^input.y:1' stdout
-stderr:
-stdout:
 stdout:
+./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c
 input.y:1: #error expected '{' before 'break'
 input.y:1: #error expected '{' before 'break'
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 276. synclines.at:237:  ok
-./conflicts.at:1096:  $PREPARSER ./input
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 stderr:
-syntax error
-./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-
-235. conflicts.at:1096:  ok
-
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-277. synclines.at:264: testing Postprologue syncline ...
 
-./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-278. synclines.at:291: testing Action syncline ...
-./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-279. synclines.at:310: testing Epilogue syncline ...
-./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c
-stderr:
 syncline.c: In function 'foo':
 syncline.c:4:2: error: #error "4"
     4 | #error "4"
@@ -8343,13 +8375,11 @@
 EOF
 
 stdout:
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 syncline.c:4: #error "4"
 ./synclines.at:264: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c
 stderr:
+./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c
 syncline.c: In function 'foo':
 syncline.c:4:2: error: #error "4"
     4 | #error "4"
@@ -8382,16 +8412,20 @@
 EOF
 
 stdout:
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 syncline.c:4: #error "4"
 ./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
-stderr:
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+279. synclines.at:310: testing Epilogue syncline ...
+./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c input.c
-syncline.c: In function 'foo':
-syncline.c:4:2: error: #error "4"
-    4 | #error "4"
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+stderr:
+input.y:13:2: error: #error "13"
+   13 | #error "13"
       |  ^~~~~
-./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -8418,13 +8452,16 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+stdout:
+input.y:13: #error "13"
+./synclines.at:264: cat stdout
 stderr:
-input.y:13:2: error: #error "13"
-   13 | #error "13"
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+input.y: In function 'yyparse':
+input.y:8:2: error: #error "8"
+    8 | #error "8"
       |  ^~~~~
-stdout:
-./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -8451,20 +8488,25 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-syncline.c:4: #error "4"
-./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
+277. synclines.at:264:  ok
 stdout:
-./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c
-input.y:13: #error "13"
-./synclines.at:264: cat stdout
-stderr:
+input.y:8: #error "8"
+./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c
+./synclines.at:291: cat stdout
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+
+278. synclines.at:291:  ok
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-input.y: In function 'yyparse':
-input.y:8:2: error: #error "8"
-    8 | #error "8"
+
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+stderr:
+280. synclines.at:327: testing %code top syncline ...
+./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+syncline.c: In function 'foo':
+syncline.c:4:2: error: #error "4"
+    4 | #error "4"
       |  ^~~~~
-277. synclines.at:264:  ok
-./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -8492,13 +8534,17 @@
 EOF
 
 stdout:
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-input.y:8: #error "8"
-./synclines.at:291: cat stdout
-278. synclines.at:291:  ok
-
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+syncline.c:4: #error "4"
+./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
+281. synclines.at:346: testing %destructor syncline ...
+./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c
+./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wnone $file
 stderr:
+./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c
 input.y:8:2: error: #error "8"
     8 | #error "8"
       |  ^~~~~
@@ -8529,29 +8575,19 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-
 stdout:
 input.y:8: #error "8"
 ./synclines.at:310: cat stdout
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 279. synclines.at:310:  ok
-280. synclines.at:327: testing %code top syncline ...
-./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wnone $file
 
-281. synclines.at:346: testing %destructor syncline ...
-./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c
-282. synclines.at:370: testing %printer syncline ...
-./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c
+stderr:
 ./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror $file
 stderr:
 syncline.c: In function 'foo':
 syncline.c:4:2: error: #error "4"
     4 | #error "4"
       |  ^~~~~
-./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -8578,16 +8614,11 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-stdout:
-syncline.c:4: #error "4"
-./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
-stderr:
-./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c
 syncline.c: In function 'foo':
 syncline.c:4:2: error: #error "4"
     4 | #error "4"
       |  ^~~~~
-./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -8614,18 +8645,24 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c
+282. synclines.at:370: testing %printer syncline ...
+./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+stdout:
 stdout:
 syncline.c:4: #error "4"
 ./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
+syncline.c:4: #error "4"
 ./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c
+./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
+./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c
 ./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wnone $file
+./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c
 stderr:
-stderr:
+input.y: In function 'yydestruct':
 input.y:2:2: error: #error "2"
     2 | #error "2"
       |  ^~~~~
-./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -8652,11 +8689,11 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-syncline.c: In function 'foo':
-syncline.c:4:2: error: #error "4"
-    4 | #error "4"
+stderr:
+input.y:2:2: error: #error "2"
+    2 | #error "2"
       |  ^~~~~
-./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -8683,39 +8720,20 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-stderr:
 stdout:
 input.y:2: #error "2"
-./synclines.at:327: cat stdout
-input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
-input.y: warning: shift/reduce conflict on token J [-Wcounterexamples]
-time limit exceeded: 6.000000
-  First example: H i . J K $end
-  Shift derivation
-    $accept
-    `-> 0: a                       $end
-           `-> 2: H i
-                    `-> 4: i . J K
-  Second example: H i . J $end
-  Reduce derivation
-    $accept
-    `-> 0: s                     $end
-           `-> 1: a            J
-                  `-> 2: H i .
-input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
 stdout:
-syncline.c:4: #error "4"
-./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
-280. synclines.at:327:  ok
-./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
-./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c
+./synclines.at:346: cat stdout
+input.y:2: #error "2"
+./synclines.at:327: cat stdout
+281. synclines.at:346:  ok
 stderr:
-input.y: In function 'yydestruct':
-input.y:2:2: error: #error "2"
-    2 | #error "2"
+280. synclines.at:327:  ok
+syncline.c: In function 'foo':
+syncline.c:4:2: error: #error "4"
+    4 | #error "4"
       |  ^~~~~
-./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77
+./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77
   # Remove left-hand margin.
   s/^[\d ]{6}\| //gm;
 
@@ -8742,16 +8760,18 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
-./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror $file
-
 stdout:
-input.y:2: #error "2"
-./synclines.at:346: cat stdout
-281. synclines.at:346:  ok
+syncline.c:4: #error "4"
+./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77
 
+
+./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c
+./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror $file
+284. synclines.at:440: testing syncline escapes: glr.c ...
+./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77
 283. synclines.at:440: testing syncline escapes: yacc.c ...
-stderr:
 ./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77
+stderr:
 input.y: In function 'yy_symbol_value_print':
 input.y:2:2: error: #error "2"
     2 | #error "2"
@@ -8783,53 +8803,51 @@
   s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm;
 EOF
 
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 stdout:
-284. synclines.at:440: testing syncline escapes: glr.c ...
-./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77
 input.y:2: #error "2"
 ./synclines.at:370: cat stdout
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-282. synclines.at:370:  ok
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+282. synclines.at:370:  ok
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 285. synclines.at:440: testing syncline escapes: lalr1.cc ...
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+stderr:
 ./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+stdout:
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o \"\\\"\".c \"\\\"\".y
 stderr:
 stdout:
-stderr:
 ./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o \"\\\"\".c \"\\\"\".y
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-stdout:
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o \"\\\"\".c \"\\\"\".y
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./synclines.at:440: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./synclines.at:440: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-stderr:
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
-stdout:
-./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o \"\\\"\".cc \"\\\"\".y
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wnone $file
 stderr:
 stdout:
 ./conflicts.at:1096:  $PREPARSER ./input
-./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wnone $file
 stderr:
 syntax error
 ./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 236. conflicts.at:1096:  ok
+stdout:
+./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o \"\\\"\".cc \"\\\"\".y
 
 ./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Werror $file
-./synclines.at:440: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS
 286. synclines.at:440: testing syncline escapes: glr.cc ...
 ./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77
+./synclines.at:440: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
@@ -8840,9 +8858,9 @@
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 
+./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wnone $file
 stderr:
 stdout:
-./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wnone $file
 ./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o \"\\\"\".cc \"\\\"\".y
 287. synclines.at:440: testing syncline escapes: glr2.cc ...
 ./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77
@@ -8851,17 +8869,17 @@
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
+./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 stderr:
 stdout:
-./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o \"\\\"\".cc \"\\\"\".y
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
-./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
+./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
 ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Werror $file
 ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wnone $file
@@ -8908,86 +8926,78 @@
 ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc -d input.y
 ./synclines.at:497: mv input.cc with.cc
 ./synclines.at:497: mv input.hh with.hh
-stderr:
 ./synclines.at:497: grep -v '#line' with.cc >expout
 ./synclines.at:497: cat without.cc
-input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
-input.y: warning: shift/reduce conflict on token J [-Wcounterexamples]
-time limit exceeded: 6.000000
-  First example     H i . J K $end
-  Shift derivation  $accept -> [ a -> [ H i -> [ i . J K ] ] $end ]
-  Second example    H i . J $end
-  Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ]
-input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
-267. counterexample.at:441:  ok
 ./synclines.at:497: grep -v '#line' with.hh >expout
 ./synclines.at:497: cat without.hh
 290. synclines.at:497:  ok
 
-
 291. synclines.at:497: testing %no-lines: glr.cc ...
 ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --no-lines -o input.cc -d input.y
-292. synclines.at:497: testing %no-lines: glr2.cc ...
-./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --no-lines -o input.cc -d input.y
 ./synclines.at:497: mv input.cc without.cc
 ./synclines.at:497: mv input.hh without.hh
 ./synclines.at:497: grep '#line' *.cc *.hh
 ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc -d input.y
+./synclines.at:497: mv input.cc with.cc
+./synclines.at:497: mv input.hh with.hh
+./synclines.at:497: grep -v '#line' with.cc >expout
+./synclines.at:497: cat without.cc
+./synclines.at:497: grep -v '#line' with.hh >expout
+./synclines.at:497: cat without.hh
+291. synclines.at:497:  ok
+
+292. synclines.at:497: testing %no-lines: glr2.cc ...
+./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --no-lines -o input.cc -d input.y
 ./synclines.at:497: mv input.cc without.cc
 ./synclines.at:497: mv input.hh without.hh
 ./synclines.at:497: grep '#line' *.cc *.hh
 ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc -d input.y
+stderr:
+stdout:
+284. synclines.at:440:  ok
 ./synclines.at:497: mv input.cc with.cc
 ./synclines.at:497: mv input.hh with.hh
 ./synclines.at:497: grep -v '#line' with.cc >expout
-./synclines.at:497: mv input.cc with.cc
-./synclines.at:497: cat without.cc
-./synclines.at:497: mv input.hh with.hh
-./synclines.at:497: grep -v '#line' with.cc >expout
-stderr:
-stdout:
-./synclines.at:497: grep -v '#line' with.hh >expout
+
 ./synclines.at:497: cat without.cc
-./synclines.at:497: cat without.hh
-284. synclines.at:440:  ok
 ./synclines.at:497: grep -v '#line' with.hh >expout
 ./synclines.at:497: cat without.hh
-291. synclines.at:497:  ok
 292. synclines.at:497:  ok
-
-
-
 293. synclines.at:507: testing Output columns ...
 ./synclines.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+
 294. headers.at:56: testing Invalid CPP guards:  --defines=input/input.h ...
-295. headers.at:57: testing Invalid CPP guards:  --defines=9foo.h ...
-./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y
 ./headers.at:56: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y
-./headers.at:57: $CC $CFLAGS $CPPFLAGS  -c -o 9foo.o -I. -c 9foo.c 
 ./synclines.at:541: sed -ne '/--BEGIN/,/--END/{' \
     -e '/input.c/s/ [0-9]* / LINE /;' \
     -e 'p;}' \
     input.c
 293. synclines.at:507:  ok
+
 ./headers.at:56: $CC $CFLAGS $CPPFLAGS  -c -o input/input.o -I. -c input/input.c 
+295. headers.at:57: testing Invalid CPP guards:  --defines=9foo.h ...
+./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y
+./headers.at:57: $CC $CFLAGS $CPPFLAGS  -c -o 9foo.o -I. -c 9foo.c 
+stderr:
+stdout:
+294. headers.at:56:  ok
 
 296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ...
 ./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y
-./headers.at:58: $CC $CFLAGS $CPPFLAGS  -c -o input/input.o -I. -c input/input.c 
-stderr:
-stdout:
 stderr:
 stdout:
 295. headers.at:57:  ok
-294. headers.at:56:  ok
-
+./headers.at:58: $CC $CFLAGS $CPPFLAGS  -c -o input/input.o -I. -c input/input.c 
 
 297. headers.at:59: testing Invalid CPP guards: %glr-parser --defines=9foo.h ...
 ./headers.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y
+./headers.at:59: $CC $CFLAGS $CPPFLAGS  -c -o 9foo.o -I. -c 9foo.c 
+stderr:
+stdout:
+285. synclines.at:440:  ok
+
 298. headers.at:67: testing export YYLTYPE ...
 ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --header -o input.c input.y
-./headers.at:59: $CC $CFLAGS $CPPFLAGS  -c -o 9foo.o -I. -c 9foo.c 
 ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header -o input.c input.y -Werror
 stderr:
 input.y:11.1-18: error: deprecated directive: '%name-prefix "my_"', use '%define api.prefix {my_}' [-Werror=deprecated]
@@ -8995,253 +9005,148 @@
 ./headers.at:85: sed 's,.*/$,,' stderr 1>&2
 ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header -o input.c input.y --warnings=error
 ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header -o input.c input.y -Wnone,none -Werror --trace=none
-./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none
-./headers.at:102: $CC $CFLAGS $CPPFLAGS  -c -o caller.o caller.c 
-stderr:
-stdout:
-./headers.at:103: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stderr:
 stdout:
-./headers.at:104: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o caller caller.o input.o $LIBS
-stderr:
-stdout:
-./headers.at:105:  $PREPARSER ./caller
-stderr:
-./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-298. headers.at:67:  ok
+296. headers.at:58:  ok
 
+./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none
 299. headers.at:177: testing Sane headers:  ...
 ./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
+./headers.at:102: $CC $CFLAGS $CPPFLAGS  -c -o caller.o caller.c 
 ./headers.at:177: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stderr:
 stdout:
-296. headers.at:58:  ok
+stderr:
+stdout:
+297. headers.at:59:  ok
+./headers.at:103: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 
 300. headers.at:178: testing Sane headers: %locations %debug ...
 ./headers.at:178: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
+./headers.at:178: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stderr:
 stdout:
-285. synclines.at:440:  ok
-stderr:
-stdout:
-297. headers.at:59:  ok
-
+286. synclines.at:440:  ok
 
-./headers.at:178: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 301. headers.at:180: testing Sane headers: %glr-parser ...
 ./headers.at:180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
-302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ...
-./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
 ./headers.at:180: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-./headers.at:181: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stderr:
 stdout:
-./headers.at:177: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
+./conflicts.at:1096:  $PREPARSER ./input
+stderr:
+syntax error
+./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stdout:
-299. headers.at:177:  ok
+237. conflicts.at:1096:  ok
+./headers.at:104: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o caller caller.o input.o $LIBS
+
+302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ...
+./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
+stderr:
+stdout:
+./headers.at:105:  $PREPARSER ./caller
+stderr:
+./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+298. headers.at:67:  ok
 
+./headers.at:181: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 303. headers.at:183: testing Sane headers: api.pure ...
 ./headers.at:183: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
-./headers.at:183: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-stderr:
-input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
-input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr]
-input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-  First example: . c A A $end
-  First reduce derivation
-    $accept
-    `-> 0: a                                   $end
-           `-> 1: b               d
-                  `-> 3: %empty . `-> 6: c A A
-  Second example: . c A A $end
-  Second reduce derivation
-    $accept
-    `-> 0: a                                   $end
-           `-> 2: c               d
-                  `-> 4: %empty . `-> 6: c A A
-input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-time limit exceeded: 6.000000
-  First example: b . c A A $end
-  First reduce derivation
-    $accept
-    `-> 0: a                                                   $end
-           `-> 1: b d
-                    `-> 5: a
-                           `-> 1: b               d
-                                  `-> 3: %empty . `-> 6: c A A
-  Second example: b . A $end
-  Second reduce derivation
-    $accept
-    `-> 0: a                                 $end
-           `-> 1: b d
-                    `-> 6: c               A
-                           `-> 4: %empty .
-input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-time limit exceeded: 6.000000
-  First example: c . c A A $end
-  First reduce derivation
-    $accept
-    `-> 0: a                                                   $end
-           `-> 2: c d
-                    `-> 5: a
-                           `-> 1: b               d
-                                  `-> 3: %empty . `-> 6: c A A
-  Second example: c . A $end
-  Second reduce derivation
-    $accept
-    `-> 0: a                                 $end
-           `-> 2: c d
-                    `-> 6: c               A
-                           `-> 4: %empty .
-input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
-time limit exceeded: 6.000000
-  First example: b c . A
-  Shift derivation
-    a
-    `-> 1: b d
-             `-> 6: c . A
-  Second example: b c . c A A $end
-  Reduce derivation
-    $accept
-    `-> 0: a                                                                   $end
-           `-> 1: b d
-                    `-> 5: a
-                           `-> 2: c d
-                                    `-> 5: a
-                                           `-> 1: b               d
-                                                  `-> 3: %empty . `-> 6: c A A
-input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-  First example: b c . c A A $end
-  First reduce derivation
-    $accept
-    `-> 0: a                                                                   $end
-           `-> 1: b d
-                    `-> 5: a
-                           `-> 2: c d
-                                    `-> 5: a
-                                           `-> 1: b               d
-                                                  `-> 3: %empty . `-> 6: c A A
-  Second example: b c . A $end
-  Second reduce derivation
-    $accept
-    `-> 0: a                                                 $end
-           `-> 1: b d
-                    `-> 5: a
-                           `-> 2: c d
-                                    `-> 6: c               A
-                                           `-> 4: %empty .
-input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
-  First example: b c . A
-  Shift derivation
-    a
-    `-> 1: b d
-             `-> 6: c . A
-  Second example: b c . A $end
-  Reduce derivation
-    $accept
-    `-> 0: a                                                 $end
-           `-> 1: b d
-                    `-> 5: a
-                           `-> 2: c d
-                                    `-> 6: c               A
-                                           `-> 4: %empty .
-input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
-  Example: b d .
-  First reduce derivation
-    a
-    `-> 1: b d .
-  Second reduce derivation
-    a
-    `-> 1: b d
-             `-> 7: d .
-input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
-  Example: c d .
-  First reduce derivation
-    a
-    `-> 2: c d .
-  Second reduce derivation
-    a
-    `-> 2: c d
-             `-> 7: d .
-input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
-input.y:6.15: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
-./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 stderr:
 stdout:
-286. synclines.at:440:  ok
-
+./headers.at:177: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
 stderr:
+./headers.at:183: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stdout:
-./conflicts.at:1096:  $PREPARSER ./input
+299. headers.at:177:  ok
+
 304. headers.at:184: testing Sane headers: api.push-pull=both ...
 ./headers.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
-stderr:
-syntax error
-./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-237. conflicts.at:1096:  ok
-
 ./headers.at:184: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-305. headers.at:185: testing Sane headers: api.pure api.push-pull=both ...
-./headers.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
-./headers.at:185: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stderr:
 stdout:
-stderr:
-./headers.at:178: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
-stdout:
 ./headers.at:183: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
 stderr:
-stdout:
 stderr:
 stdout:
-300. headers.at:178:  ok
+stdout:
 303. headers.at:183:  ok
+./headers.at:178: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
 
+stderr:
+stdout:
+305. headers.at:185: testing Sane headers: api.pure api.push-pull=both ...
+./headers.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.c input.y
+300. headers.at:178:  ok
 
 306. headers.at:187: testing Sane headers: c++ ...
 ./headers.at:187: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.cc input.y
-307. headers.at:188: testing Sane headers: %locations %debug c++ ...
-./headers.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.cc input.y
-./headers.at:187: $CXX $CPPFLAGS  $CXXFLAGS -c -o input.o input.cc 
-./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o input.o input.cc 
+./headers.at:185: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 stderr:
+./headers.at:187: $CXX $CPPFLAGS  $CXXFLAGS -c -o input.o input.cc 
 stdout:
 ./headers.at:184: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
 stderr:
 stdout:
 304. headers.at:184:  ok
 
+307. headers.at:188: testing Sane headers: %locations %debug c++ ...
+./headers.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.cc input.y
+./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o input.o input.cc 
 stderr:
 stdout:
 ./headers.at:185: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
-308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ...
-./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.cc input.y
 stderr:
 stdout:
 305. headers.at:185:  ok
 
+308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ...
+./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.cc input.y
+stderr:
+stdout:
+./headers.at:180: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
+stderr:
 ./headers.at:189: $CXX $CPPFLAGS  $CXXFLAGS -c -o input.o input.cc 
+stdout:
+301. headers.at:180:  ok
+
 309. headers.at:191: testing Sane headers: %locations c++ %glr-parser ...
 ./headers.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o input.cc input.y
 ./headers.at:191: $CXX $CPPFLAGS  $CXXFLAGS -c -o input.o input.cc 
 stderr:
+input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
+input.y: warning: shift/reduce conflict on token J [-Wcounterexamples]
+time limit exceeded: 6.000000
+  First example: H i . J K $end
+  Shift derivation
+    $accept
+    `-> 0: a                       $end
+           `-> 2: H i
+                    `-> 4: i . J K
+  Second example: H i . J $end
+  Reduce derivation
+    $accept
+    `-> 0: s                     $end
+           `-> 1: a            J
+                  `-> 2: H i .
+input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
+./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
+stderr:
 stdout:
-./headers.at:180: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
+./headers.at:181: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
 stderr:
 stdout:
-301. headers.at:180:  ok
+302. headers.at:181:  ok
 
 310. headers.at:199: testing Several parsers ...
 ./headers.at:320: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x1.c x1.y
 ./headers.at:320: $CC $CFLAGS $CPPFLAGS  -c -o x1.o x1.c 
 stderr:
 stdout:
-./headers.at:181: $CC $CFLAGS $CPPFLAGS  -c -o $h.o $h.c 
-stderr:
-stdout:
-302. headers.at:181:  ok
+287. synclines.at:440:  ok
 
 311. actions.at:24: testing Midrule actions ...
 ./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -v -o input.c input.y
@@ -9253,6 +9158,9 @@
 ./headers.at:321: $CC $CFLAGS $CPPFLAGS  -c -o x2.o x2.c 
 stderr:
 stdout:
+./headers.at:187: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+stderr:
+stdout:
 ./actions.at:61:  $PREPARSER ./input
 stderr:
 ./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -9268,14 +9176,30 @@
 ./headers.at:322: $CC $CFLAGS $CPPFLAGS  -c -o x3.o x3.c 
 stderr:
 stdout:
+./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+stderr:
+stdout:
+./headers.at:189: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+stderr:
+stdout:
+./headers.at:191: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+stderr:
+stdout:
 ./headers.at:187: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
 stderr:
 stdout:
-287. synclines.at:440:  ok
+306. headers.at:187:  ok
 
 313. actions.at:122: testing Implicitly empty rule ...
 ./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wempty-rule 1.y
+stderr:
+stdout:
+./actions.at:111:  $PREPARSER ./input
+stderr:
 ./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Werror
+./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+312. actions.at:72:  ok
+
 stderr:
 1.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule]
    11 | a: /* empty. */ {};
@@ -9283,12 +9207,29 @@
       |                 %empty
 1.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
 ./actions.at:133: sed 's,.*/$,,' stderr 1>&2
+314. actions.at:172: testing Invalid uses of %empty ...
+./actions.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret one.y
 ./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error
+./actions.at:192: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -u one.y
+./actions.at:202: sed -e '1,8d' one.y
+./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret two.y
 ./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none
+314. actions.at:172:  ok
 ./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=none -Werror --trace=none
+
+stderr:
+stdout:
+./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+315. actions.at:240: testing Valid uses of %empty ...
+./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret 2.y
+./actions.at:259: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret 2.y -Werror
 stderr:
+stdout:
+./headers.at:322: echo "x3" >>expout
+./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x4.c x4.y
+stderr:
 2.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule]
    11 | a: /* empty. */ {};
       |                 ^~
@@ -9303,30 +9244,16 @@
 stderr:
 stdout:
 stderr:
-./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
 stdout:
-./actions.at:111:  $PREPARSER ./input
+./headers.at:189: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+./headers.at:191: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+./headers.at:323: $CC $CFLAGS $CPPFLAGS  -c -o x4.o x4.c 
 ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret 2.y -Wnone,none -Werror --trace=none
 stderr:
-./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-312. actions.at:72:  ok
-./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none
-
-314. actions.at:172: testing Invalid uses of %empty ...
-./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y
-./actions.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret one.y
-./actions.at:192: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -u one.y
-./actions.at:202: sed -e '1,8d' one.y
-313. actions.at:122:  ok
-./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret two.y
-
-314. actions.at:172:  ok
-
-315. actions.at:240: testing Valid uses of %empty ...
-./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-stderr:
 stdout:
-./headers.at:189: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+308. headers.at:189:  ok
+
+./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none
 316. actions.at:270: testing Add missing %empty ...
 ./actions.at:285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --update -Wall input.y
 stderr:
@@ -9337,190 +9264,132 @@
 input.y:9.3: warning: empty rule without %empty [-Wempty-rule]
 bison: file 'input.y' was updated (backup: 'input.y~')
 ./actions.at:286: cat input.y
+./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y
 ./actions.at:300: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall input.y
-./actions.at:259: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 316. actions.at:270:  ok
+313. actions.at:122:  ok
 
 stderr:
 stdout:
-./headers.at:191: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
-317. actions.at:365: testing Initial location: yacc.c  ...
-./actions.at:365: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./actions.at:365: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-stdout:
-./headers.at:322: echo "x3" >>expout
-./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x4.c x4.y
-./headers.at:323: $CC $CFLAGS $CPPFLAGS  -c -o x4.o x4.c 
-stderr:
-stdout:
-./headers.at:187: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
-stderr:
-stdout:
 ./actions.at:260:  $PREPARSER ./input
 stderr:
+
 ./actions.at:260: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 315. actions.at:240:  ok
-stderr:
-stdout:
-306. headers.at:187:  ok
-
-
+317. actions.at:365: testing Initial location: yacc.c  ...
+./actions.at:365: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 318. actions.at:366: testing Initial location: yacc.c api.pure=full ...
+
 ./actions.at:366: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./actions.at:365: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 319. actions.at:367: testing Initial location: yacc.c api.pure %parse-param { int x } ...
 ./actions.at:367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./actions.at:366: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./actions.at:367: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-./actions.at:365:  $PREPARSER ./input
+./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+stderr:
+stdout:
+./headers.at:191: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+stderr:
+stdout:
+./actions.at:367:  $PREPARSER ./input
 stderr:
 1.1
 1.1: syntax error
-./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-317. actions.at:365:  ok
+./actions.at:367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+319. actions.at:367:  ok
 
-stderr:
-input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
-input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr]
-input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-  First example            . c A A $end
-  First reduce derivation  $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ]
-  Second example           . c A A $end
-  Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ]
-input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-time limit exceeded: 6.000000
-  First example            b . c A A $end
-  First reduce derivation  $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ]
-  Second example           b . A $end
-  Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ]
-input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-time limit exceeded: 6.000000
-  First example            c . c A A $end
-  First reduce derivation  $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ]
-  Second example           c . A $end
-  Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ]
-input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
-time limit exceeded: 6.000000
-  First example     b c . A
-  Shift derivation  a -> [ b d -> [ c . A ] ]
-  Second example    b c . c A A $end
-  Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ]
-input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
-  First example            b c . c A A $end
-  First reduce derivation  $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ]
-  Second example           b c . A $end
-  Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ]
-input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
-  First example     b c . A
-  Shift derivation  a -> [ b d -> [ c . A ] ]
-  Second example    b c . A $end
-  Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ]
-input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
-  Example                  b d .
-  First reduce derivation  a -> [ b d . ]
-  Second reduce derivation a -> [ b d -> [ d . ] ]
-input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
-  Example                  c d .
-  First reduce derivation  a -> [ c d . ]
-  Second reduce derivation a -> [ c d -> [ d . ] ]
-input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
-input.y:6.15: warning: rule useless in parser due to conflicts [-Wother]
-./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
 320. actions.at:368: testing Initial location: yacc.c api.push-pull=both ...
 ./actions.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-270. counterexample.at:610:  ok
-
 ./actions.at:368: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ...
-./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./actions.at:369: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
 stderr:
 stdout:
+stdout:
 ./actions.at:366:  $PREPARSER ./input
 stderr:
+./actions.at:365:  $PREPARSER ./input
 1.1
 1.1: syntax error
 ./actions.at:366: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-318. actions.at:366:  ok
-stderr:
-stdout:
-./actions.at:367:  $PREPARSER ./input
 stderr:
 1.1
 1.1: syntax error
-./actions.at:367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+317. actions.at:365:  ok
+318. actions.at:366:  ok
 
-319. actions.at:367:  ok
-stderr:
-stdout:
 
-./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
 322. actions.at:370: testing Initial location: glr.c  ...
 ./actions.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-323. actions.at:371: testing Initial location: glr.c api.pure ...
-./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ...
+./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./actions.at:370: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./actions.at:371: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./actions.at:369: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-./headers.at:191: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
 stderr:
+stderr:
+input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
+input.y: warning: shift/reduce conflict on token J [-Wcounterexamples]
+time limit exceeded: 6.000000
+  First example     H i . J K $end
+  Shift derivation  $accept -> [ a -> [ H i -> [ i . J K ] ] $end ]
+  Second example    H i . J $end
+  Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ]
+input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
 stdout:
-./headers.at:189: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
+267. counterexample.at:441:  ok
+307. headers.at:188:  ok
 stderr:
 stdout:
-308. headers.at:189:  ok
+./headers.at:323: echo "x4" >>expout
+
 
+./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x5.cc x5.y
+323. actions.at:371: testing Initial location: glr.c api.pure ...
+./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 324. actions.at:372: testing Initial location: lalr1.cc  ...
 ./actions.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./actions.at:371: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
+./headers.at:324: $CXX $CPPFLAGS  $CXXFLAGS -c -o x5.o x5.cc 
 ./actions.at:368:  $PREPARSER ./input
 stderr:
-./actions.at:372: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 1.1
 1.1: syntax error
 ./actions.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:372: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 320. actions.at:368:  ok
+stderr:
+
+stdout:
+309. headers.at:191:  ok
 
 325. actions.at:373: testing Initial location: glr.cc  ...
 ./actions.at:373: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+326. actions.at:374: testing Initial location: glr2.cc  ...
+./actions.at:374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./actions.at:373: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./actions.at:369:  $PREPARSER ./input
-./actions.at:373: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 1.1
 1.1: syntax error
 ./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 321. actions.at:369:  ok
 
-326. actions.at:374: testing Initial location: glr2.cc  ...
-./actions.at:374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
-stderr:
-stderr:
-stdout:
-stdout:
-./headers.at:191: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
-./headers.at:323: echo "x4" >>expout
-./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x5.cc x5.y
-./headers.at:324: $CXX $CPPFLAGS  $CXXFLAGS -c -o x5.o x5.cc 
-stderr:
-stdout:
-./actions.at:371:  $PREPARSER ./input
-stderr:
-1.1
-1.1: syntax error
-./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-323. actions.at:371:  ok
+327. actions.at:383: testing Initial location: yacc.c api.pure=full ...
+./actions.at:383: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./actions.at:383: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
-
 stdout:
 ./actions.at:370:  $PREPARSER ./input
 stderr:
@@ -9528,81 +9397,98 @@
 1.1: syntax error
 ./actions.at:370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 322. actions.at:370:  ok
-327. actions.at:383: testing Initial location: yacc.c api.pure=full ...
-./actions.at:383: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 
 328. actions.at:394: testing Initial location: yacc.c api.pure=full ...
 ./actions.at:394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./actions.at:383: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
+stdout:
 ./actions.at:394: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./actions.at:371:  $PREPARSER ./input
+stderr:
+1.1
+1.1: syntax error
+./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+323. actions.at:371:  ok
+
+329. actions.at:478: testing Location print: yacc.c  ...
+./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 stderr:
 stdout:
 ./actions.at:383:  $PREPARSER ./input
+./actions.at:478: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 
 : syntax error
 ./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 327. actions.at:383:  ok
-stderr:
-stdout:
+
 stderr:
 stdout:
 ./actions.at:394:  $PREPARSER ./input
-309. headers.at:191:  ok
 stderr:
+330. actions.at:478: testing Location print: glr.c  ...
+./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 0
 0: syntax error
 ./actions.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
 328. actions.at:394:  ok
+./actions.at:478: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 
-329. actions.at:478: testing Location print: yacc.c  ...
-./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-
-330. actions.at:478: testing Location print: glr.c  ...
-./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 331. actions.at:478: testing Location print: lalr1.cc  ...
 ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./actions.at:478: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./actions.at:478: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./actions.at:478: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./headers.at:188: $CXX $CPPFLAGS  $CXXFLAGS -c -o $h.o $h.cc 
-stderr:
-stdout:
-307. headers.at:188:  ok
-
-332. actions.at:478: testing Location print: glr.cc  ...
-./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./actions.at:478: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./headers.at:324: echo "x5" >>expout
+./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x6.c x6.y
 stderr:
 stdout:
 ./actions.at:478:  $PREPARSER ./input
 stderr:
-./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-329. actions.at:478:  ok
-
-333. actions.at:478: testing Location print: glr2.cc  ...
-./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./headers.at:325: $CC $CFLAGS $CPPFLAGS  -c -o x6.o x6.c 
 ./actions.at:372:  $PREPARSER ./input
+./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.1
 1.1: syntax error
 ./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+329. actions.at:478:  ok
 324. actions.at:372:  ok
 
-334. actions.at:488: testing Exotic Dollars ...
-./actions.at:532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -v -o input.c input.y
+
+332. actions.at:478: testing Location print: glr.cc  ...
+./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+333. actions.at:478: testing Location print: glr2.cc  ...
+./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./actions.at:478: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./actions.at:373:  $PREPARSER ./input
-./actions.at:533: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./headers.at:325: echo "x6" >>expout
+./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x7.c x7.y
+./headers.at:326: $CC $CFLAGS $CPPFLAGS  -c -o x7.o x7.c 
 stderr:
-1.1
+stdout:
+./actions.at:478:  $PREPARSER ./input
+stderr:
+./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+330. actions.at:478:  ok
+
+334. actions.at:488: testing Exotic Dollars ...
+./actions.at:532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -v -o input.c input.y
+./actions.at:533: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
+stdout:
+./headers.at:326: echo "x7" >>expout
+./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x8.c x8.y
+stderr:
+stdout:
+./actions.at:373:  $PREPARSER ./input
+stderr:
+./headers.at:327: $CC $CFLAGS $CPPFLAGS  -c -o x8.o x8.c 
+1.1
 1.1: syntax error
 ./actions.at:373: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 325. actions.at:373:  ok
@@ -9615,22 +9501,36 @@
 ./actions.at:478:  $PREPARSER ./input
 stderr:
 ./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-330. actions.at:478:  ok
+331. actions.at:478:  ok
 
-336. actions.at:1048: testing Printers and Destructors with union ...
-./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./actions.at:1048: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./actions.at:534:  $PREPARSER ./input
 stderr:
+336. actions.at:1048: testing Printers and Destructors with union ...
+./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./actions.at:534: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./actions.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+stdout:
+./headers.at:327: echo "x8" >>expout
+./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x9.cc x9.y
 ./actions.at:562: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./actions.at:1048: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./headers.at:328: $CXX $CPPFLAGS  $CXXFLAGS -c -o x9.o x9.cc 
+stderr:
+stdout:
+./actions.at:563:  $PREPARSER ./input
+stderr:
+./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+334. actions.at:488:  ok
+
 stderr:
 stdout:
 ./actions.at:1047:  $PREPARSER ./input '(x)'
 stderr:
+337. actions.at:1050: testing Printers and Destructors: %glr-parser ...
+./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -9653,10 +9553,7 @@
 Freeing nterm input (5@0-19)
 Successful parse.
 ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
 ./actions.at:1047:  $PREPARSER ./input '!!!'
-./headers.at:324: echo "x5" >>expout
 stderr:
 sending: '!' (0@0-9)
 sending: '!' (1@10-19)
@@ -9667,11 +9564,9 @@
 Freeing token END (3@30-39)
 Freeing nterm input (5@0-29)
 Successful parse.
-./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x6.c x6.y
 ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1047:  $PREPARSER ./input '(y)'
 stderr:
-stderr:
 sending: '(' (0@0-9)
 sending: 'y' (1@10-19)
 10.10-19.18: syntax error, unexpected 'y', expecting 'x'
@@ -9685,10 +9580,8 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
+./actions.at:1050: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./actions.at:1047:  $PREPARSER ./input '(xxxxx)(x)(x)y'
-./actions.at:563:  $PREPARSER ./input
-stderr:
 stderr:
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
@@ -9726,9 +9619,7 @@
 Freeing token 'y' (13@130-139)
 Parsing FAILED.
 ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1047:  $PREPARSER ./input '(x)(x)x'
-334. actions.at:488:  ok
 stderr:
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
@@ -9750,7 +9641,6 @@
 Freeing token END (7@70-79)
 Parsing FAILED.
 ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
 ./actions.at:1047:  $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)'
 stderr:
 sending: '(' (0@0-9)
@@ -9798,16 +9688,20 @@
 Parsing FAILED (status 2).
 ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 335. actions.at:1047:  ok
-./headers.at:325: $CC $CFLAGS $CPPFLAGS  -c -o x6.o x6.c 
 
-337. actions.at:1050: testing Printers and Destructors: %glr-parser ...
-./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ...
 ./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./actions.at:1050: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./actions.at:1051: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
+./actions.at:374:  $PREPARSER ./input
+stderr:
+1.1
+1.1: syntax error
+./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+326. actions.at:374:  ok
+stderr:
+./actions.at:1051: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stdout:
 ./actions.at:1048:  $PREPARSER ./input '(x)'
 stderr:
 sending: '(' (0@0-9)
@@ -9822,6 +9716,7 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
 ./actions.at:1048:  $PREPARSER ./input '!'
 stderr:
 sending: '!' (0@0-9)
@@ -9844,12 +9739,10 @@
 Freeing nterm input (5@0-29)
 Successful parse.
 ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
+339. actions.at:1053: testing Printers and Destructors: %header lalr1.cc ...
+./actions.at:1053: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 ./actions.at:1048:  $PREPARSER ./input '(y)'
-./headers.at:325: echo "x6" >>expout
 stderr:
-./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x7.c x7.y
 sending: '(' (0@0-9)
 sending: 'y' (1@10-19)
 10.10-19.18: syntax error, unexpected 'y', expecting 'x'
@@ -9903,6 +9796,7 @@
 ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1048:  $PREPARSER ./input '(x)(x)x'
 stderr:
+./actions.at:1053: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -9968,57 +9862,148 @@
 Freeing nterm line (3@30-59)
 Freeing nterm line (0@0-29)
 Parsing FAILED (status 2).
-./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./headers.at:326: $CC $CFLAGS $CPPFLAGS  -c -o x7.o x7.c 
-336. actions.at:1048:  ok
-
-339. actions.at:1053: testing Printers and Destructors: %header lalr1.cc ...
-./actions.at:1053: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 stderr:
+./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 ./actions.at:478:  $PREPARSER ./input
 stderr:
-./actions.at:1053: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-331. actions.at:478:  ok
+336. actions.at:1048:  ok
+332. actions.at:478: 
+ ok
+stderr:
 
+input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
+input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr]
+input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
+  First example: . c A A $end
+  First reduce derivation
+    $accept
+    `-> 0: a                                   $end
+           `-> 1: b               d
+                  `-> 3: %empty . `-> 6: c A A
+  Second example: . c A A $end
+  Second reduce derivation
+    $accept
+    `-> 0: a                                   $end
+           `-> 2: c               d
+                  `-> 4: %empty . `-> 6: c A A
+input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
+time limit exceeded: 6.000000
+  First example: b . c A A $end
+  First reduce derivation
+    $accept
+    `-> 0: a                                                   $end
+           `-> 1: b d
+                    `-> 5: a
+                           `-> 1: b               d
+                                  `-> 3: %empty . `-> 6: c A A
+  Second example: b . A $end
+  Second reduce derivation
+    $accept
+    `-> 0: a                                 $end
+           `-> 1: b d
+                    `-> 6: c               A
+                           `-> 4: %empty .
+input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
+time limit exceeded: 6.000000
+  First example: c . c A A $end
+  First reduce derivation
+    $accept
+    `-> 0: a                                                   $end
+           `-> 2: c d
+                    `-> 5: a
+                           `-> 1: b               d
+                                  `-> 3: %empty . `-> 6: c A A
+  Second example: c . A $end
+  Second reduce derivation
+    $accept
+    `-> 0: a                                 $end
+           `-> 2: c d
+                    `-> 6: c               A
+                           `-> 4: %empty .
+input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
+time limit exceeded: 6.000000
+  First example: b c . A
+  Shift derivation
+    a
+    `-> 1: b d
+             `-> 6: c . A
+  Second example: b c . c A A $end
+  Reduce derivation
+    $accept
+    `-> 0: a                                                                   $end
+           `-> 1: b d
+                    `-> 5: a
+                           `-> 2: c d
+                                    `-> 5: a
+                                           `-> 1: b               d
+                                                  `-> 3: %empty . `-> 6: c A A
+input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
+  First example: b c . c A A $end
+  First reduce derivation
+    $accept
+    `-> 0: a                                                                   $end
+           `-> 1: b d
+                    `-> 5: a
+                           `-> 2: c d
+                                    `-> 5: a
+                                           `-> 1: b               d
+                                                  `-> 3: %empty . `-> 6: c A A
+  Second example: b c . A $end
+  Second reduce derivation
+    $accept
+    `-> 0: a                                                 $end
+           `-> 1: b d
+                    `-> 5: a
+                           `-> 2: c d
+                                    `-> 6: c               A
+                                           `-> 4: %empty .
+input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
+  First example: b c . A
+  Shift derivation
+    a
+    `-> 1: b d
+             `-> 6: c . A
+  Second example: b c . A $end
+  Reduce derivation
+    $accept
+    `-> 0: a                                                 $end
+           `-> 1: b d
+                    `-> 5: a
+                           `-> 2: c d
+                                    `-> 6: c               A
+                                           `-> 4: %empty .
+input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
+  Example: b d .
+  First reduce derivation
+    a
+    `-> 1: b d .
+  Second reduce derivation
+    a
+    `-> 1: b d
+             `-> 7: d .
+input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
+  Example: c d .
+  First reduce derivation
+    a
+    `-> 2: c d .
+  Second reduce derivation
+    a
+    `-> 2: c d
+             `-> 7: d .
+input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
+input.y:6.15: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr
 340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ...
 ./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./actions.at:1054: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stderr:
-stdout:
-stdout:
-./actions.at:478:  $PREPARSER ./input
-./headers.at:326: echo "x7" >>expout
-stderr:
-./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x8.c x8.y
-332. actions.at:478:  ok
-
+./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wcounterexamples input.y
 341. actions.at:1056: testing Printers and Destructors: %header glr.cc ...
 ./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./headers.at:327: $CC $CFLAGS $CPPFLAGS  -c -o x8.o x8.c 
+./actions.at:1054: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./actions.at:1056: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./headers.at:327: echo "x8" >>expout
-./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o x9.cc x9.y
-./headers.at:328: $CXX $CPPFLAGS  $CXXFLAGS -c -o x9.o x9.cc 
-stderr:
-stdout:
-./actions.at:374:  $PREPARSER ./input
-stderr:
-1.1
-1.1: syntax error
-./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-326. actions.at:374:  ok
-
-342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ...
-./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./actions.at:1057: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./actions.at:1050:  $PREPARSER ./input '(x)'
 stderr:
 sending: '(' (0@0-9)
@@ -10070,11 +10055,11 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
 ./actions.at:1050:  $PREPARSER ./input '(xxxxx)(x)(x)y'
 stderr:
-./actions.at:1051:  $PREPARSER ./input '(x)'
+stderr:
+stdout:
+./headers.at:328: echo "x9" >>expout
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -10111,23 +10096,9 @@
 Freeing token 'y' (13@130-139)
 Parsing FAILED.
 ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-sending: '(' (0@0-9)
-sending: 'x' (1@10-19)
-thing (1@10-19): 'x' (1@10-19)
-sending: ')' (2@20-29)
-line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29)
-sending: END (3@30-39)
-input (0@29-29): /* Nothing */
-input (2@0-29): line (0@0-29) input (0@29-29)
-Freeing token END (3@30-39)
-Freeing nterm input (2@0-29)
-Successful parse.
-./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./actions.at:1051:  $PREPARSER ./input '!'
+./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o xa.cc xa.y
 ./actions.at:1050:  $PREPARSER ./input '(x)(x)x'
 stderr:
-stderr:
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -10148,6 +10119,36 @@
 Freeing token END (7@70-79)
 Parsing FAILED.
 ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+337. actions.at:1050:  ok
+
+./headers.at:329: $CXX $CPPFLAGS  $CXXFLAGS -c -o xa.o xa.cc 
+342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ...
+./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./actions.at:1057: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./actions.at:478:  $PREPARSER ./input
+stderr:
+stderr:
+./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./actions.at:1051:  $PREPARSER ./input '(x)'
+stderr:
+333. actions.at:478:  ok
+sending: '(' (0@0-9)
+sending: 'x' (1@10-19)
+thing (1@10-19): 'x' (1@10-19)
+sending: ')' (2@20-29)
+line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29)
+sending: END (3@30-39)
+input (0@29-29): /* Nothing */
+input (2@0-29): line (0@0-29) input (0@29-29)
+Freeing token END (3@30-39)
+Freeing nterm input (2@0-29)
+Successful parse.
+./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:1051:  $PREPARSER ./input '!'
+stderr:
 sending: '!' (0@0-9)
 sending: END (1@10-19)
 raise (4@9-9): %empty
@@ -10156,7 +10157,7 @@
 Freeing nterm input (5@0-19)
 Successful parse.
 ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-337. actions.at:1050:  ok
+
 ./actions.at:1051:  $PREPARSER ./input '!!!'
 stderr:
 sending: '!' (0@0-9)
@@ -10169,7 +10170,8 @@
 Freeing nterm input (5@0-29)
 Successful parse.
 ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
+343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ...
+./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 ./actions.at:1051:  $PREPARSER ./input '(y)'
 stderr:
 sending: '(' (0@0-9)
@@ -10225,8 +10227,6 @@
 ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1051:  $PREPARSER ./input '(x)(x)x'
 stderr:
-343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ...
-./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -10247,11 +10247,11 @@
 Freeing token END (7@70-79)
 Parsing FAILED.
 ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 338. actions.at:1051:  ok
 
 344. actions.at:1060: testing Printers and Destructors with union: %header glr2.cc ...
 ./actions.at:1060: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./actions.at:1060: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
@@ -10368,8 +10368,6 @@
 ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 339. actions.at:1053:  ok
 
-345. actions.at:1071: testing Default tagless %printer and %destructor ...
-./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
 stdout:
 ./actions.at:1054:  $PREPARSER ./input '(x)'
@@ -10386,6 +10384,8 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+345. actions.at:1071: testing Default tagless %printer and %destructor ...
+./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./actions.at:1054:  $PREPARSER ./input '!'
 stderr:
 sending: '!' (0@0-9)
@@ -10397,7 +10397,6 @@
 Successful parse.
 ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1054:  $PREPARSER ./input '!!!'
-./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
 stderr:
 sending: '!' (0@0-9)
 sending: '!' (1@10-19)
@@ -10424,6 +10423,7 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
 ./actions.at:1054:  $PREPARSER ./input '(xxxxx)(x)(x)y'
 stderr:
 sending: '(' (0@0-9)
@@ -10464,7 +10464,6 @@
 ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1054:  $PREPARSER ./input '(x)(x)x'
 stderr:
-stderr:
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -10485,46 +10484,22 @@
 Freeing token END (7@70-79)
 Parsing FAILED.
 ./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+340. actions.at:1054:  ok
+stderr:
 input.y:30.3-5: error: useless %destructor for type <*> [-Werror=other]
 input.y:30.3-5: error: useless %printer for type <*> [-Werror=other]
-340. actions.at:1054:  ok
-./actions.at:1116: sed 's,.*/$,,' stderr 1>&2
 
+./actions.at:1116: sed 's,.*/$,,' stderr 1>&2
 ./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
-stderr:
-stdout:
-./headers.at:328: echo "x9" >>expout
-./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o xa.cc xa.y
 346. actions.at:1174: testing Default tagged and per-type %printer and %destructor ...
 ./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
 ./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
-./headers.at:329: $CXX $CPPFLAGS  $CXXFLAGS -c -o xa.o xa.cc 
 ./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
 stderr:
-stdout:
-./actions.at:478:  $PREPARSER ./input
-stderr:
 stderr:
-./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input.y:22.3-4: error: useless %destructor for type <> [-Werror=other]
 input.y:22.3-4: error: useless %printer for type <> [-Werror=other]
-333. actions.at:478:  ok
-./actions.at:1233: sed 's,.*/$,,' stderr 1>&2
-./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
-
-./actions.at:1120: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ...
-./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input0.c input0.y
-./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
-./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input0.c input0.y -Werror
-stderr:
-input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other]
-input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other]
-./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
-./actions.at:1416: sed 's,.*/$,,' stderr 1>&2
-stderr:
-./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input0.c input0.y --warnings=error
 stdout:
 ./actions.at:1056:  $PREPARSER ./input '(x)'
 stderr:
@@ -10540,7 +10515,9 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:1233: sed 's,.*/$,,' stderr 1>&2
 ./actions.at:1056:  $PREPARSER ./input '!'
+./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
 stderr:
 sending: '!' (0@0-9)
 sending: END (1@10-19)
@@ -10551,7 +10528,6 @@
 Successful parse.
 ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1056:  $PREPARSER ./input '!!!'
-./actions.at:1237: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 sending: '!' (0@0-9)
 sending: '!' (1@10-19)
@@ -10563,9 +10539,9 @@
 Freeing nterm input (5@0-29)
 Successful parse.
 ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none
 ./actions.at:1056:  $PREPARSER ./input '(y)'
 stderr:
+./actions.at:1120: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 sending: '(' (0@0-9)
 sending: 'y' (1@10-19)
 10.10-19.18: syntax error, unexpected 'y', expecting 'x'
@@ -10581,6 +10557,7 @@
 ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1056:  $PREPARSER ./input '(xxxxx)(x)(x)y'
 stderr:
+./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -10619,7 +10596,6 @@
 ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1056:  $PREPARSER ./input '(x)(x)x'
 stderr:
-./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -10641,19 +10617,25 @@
 Parsing FAILED.
 ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 341. actions.at:1056:  ok
+./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
 
-./actions.at:1416: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input0 input0.c $LIBS
-348. actions.at:1429: testing Default %printer and %destructor are not for error or $undefined ...
-./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
+347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ...
+./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input0.c input0.y
+./actions.at:1237: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input0.c input0.y -Werror
 stderr:
-input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other]
-input.y:23.6-8: error: useless %printer for type <*> [-Werror=other]
-./actions.at:1474: sed 's,.*/$,,' stderr 1>&2
-./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
-./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
-./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
-./actions.at:1478: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other]
+input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other]
+./actions.at:1416: sed 's,.*/$,,' stderr 1>&2
+./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input0.c input0.y --warnings=error
+stderr:
+stdout:
+./headers.at:329: echo "xa" >>expout
+./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o xb.cc xb.y
+./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none
+./headers.at:330: $CXX $CPPFLAGS  $CXXFLAGS -c -o xb.o xb.cc 
+./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none
+./actions.at:1416: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input0 input0.c $LIBS
 stderr:
 stdout:
 ./actions.at:1121:  $PREPARSER ./input --debug
@@ -10697,16 +10679,12 @@
 ./actions.at:1121: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 345. actions.at:1071:  ok
 
-349. actions.at:1532: testing Default %printer and %destructor are not for $accept ...
-./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
+348. actions.at:1429: testing Default %printer and %destructor are not for error or $undefined ...
+./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
 stdout:
 ./actions.at:1238:  $PREPARSER ./input --debug
 stderr:
-stderr:
-input.y:24.3-4: error: useless %destructor for type <> [-Werror=other]
-input.y:24.3-4: error: useless %printer for type <> [-Werror=other]
 Starting parse
 Entering state 0
 Stack now 0
@@ -10758,130 +10736,28 @@
 Cleanup: discarding lookahead token "end of file" ()
 Stack now 0
 ./actions.at:1238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./actions.at:1582: sed 's,.*/$,,' stderr 1>&2
-./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
 346. actions.at:1174:  ok
 
-350. actions.at:1596: testing Default %printer and %destructor for midrule values ...
-./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
+349. actions.at:1532: testing Default %printer and %destructor are not for $accept ...
+./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
-./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
-stdout:
-./actions.at:1416:  $PREPARSER ./input0 --debug
+input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other]
+input.y:23.6-8: error: useless %printer for type <*> [-Werror=other]
+./actions.at:1474: sed 's,.*/$,,' stderr 1>&2
+./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
+./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reducing stack by rule 1 (line 49):
--> $$ = nterm start (1.1: <> for 'S' @ 1)
-Entering state 1
-Stack now 0 1
-Reading a token
-Now at end of input.
-Shifting token END (1.1: <> for 'E' @ 1)
-Entering state 2
-Stack now 0 1 2
-Stack now 0 1 2
-Cleanup: popping token END (1.1: <> for 'E' @ 1)
-Cleanup: popping nterm start (1.1: <> for 'S' @ 1)
-./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input1.c input1.y
+input.y:24.3-4: error: useless %destructor for type <> [-Werror=other]
+input.y:24.3-4: error: useless %printer for type <> [-Werror=other]
+./actions.at:1582: sed 's,.*/$,,' stderr 1>&2
+./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
+./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
+./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
+./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
 ./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
-./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
-stderr:
-stdout:
-./actions.at:1479:  $PREPARSER ./input --debug
-./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input1.c input1.y -Werror
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token 'a' ('a')
-Shifting token 'a' ('a')
-Entering state 1
-Stack now 0 1
-Reading a token
-Next token is token 'b' ('b')
-syntax error
-Shifting token error ()
-Entering state 3
-Stack now 0 1 3
-Next token is token 'b' ('b')
-Shifting token 'b' ('b')
-Entering state 5
-Stack now 0 1 3 5
-Reading a token
-Next token is token "invalid token" ()
-Error: popping token 'b' ('b')
-DESTROY 'b'
-Stack now 0 1 3
-Error: popping token error ()
-Stack now 0 1
-Shifting token error ()
-Entering state 3
-Stack now 0 1 3
-Next token is token "invalid token" ()
-Error: discarding token "invalid token" ()
-Error: popping token error ()
-Stack now 0 1
-Shifting token error ()
-Entering state 3
-Stack now 0 1 3
-Reading a token
-Now at end of input.
-Cleanup: discarding lookahead token "end of file" ()
-Stack now 0 1 3
-Cleanup: popping token error ()
-Cleanup: popping token 'a' ('a')
-DESTROY 'a'
-./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:1478: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./actions.at:1586: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-348. actions.at:1429:  ok
-stderr:
-input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other]
-input.y:24.57-59: error: useless %printer for type <*> [-Werror=other]
-input.y:33.3-23: error: unset value: $$ [-Werror=other]
-input.y:32.3-23: error: unused value: $3 [-Werror=other]
-
-./actions.at:1634: sed 's,.*/$,,' stderr 1>&2
-./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
-stderr:
-input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other]
-input1.y:30.3-4: error: useless %printer for type <> [-Werror=other]
-./actions.at:1417: sed 's,.*/$,,' stderr 1>&2
-351. actions.at:1743: testing @$ in %initial-action implies %locations ...
-./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input1.c input1.y --warnings=error
-./actions.at:1743: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none
-./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
-./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none
-./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
-./actions.at:1417: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input1 input1.c $LIBS
-./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o input.c input.y
-./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Werror
-stderr:
-input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other]
-   24 | %printer    { #error "<*> printer should not be used" } <*>
-      |                                                         ^~~
-input.y:24.57-59: error: useless %printer for type <*> [-Werror=other]
-   24 | %printer    { #error "<*> printer should not be used" } <*>
-      |                                                         ^~~
-input.y:33.3-23: error: unset value: $$ [-Werror=other]
-   33 |   {           @$ = 4; } // Only used.
-      |   ^~~~~~~~~~~~~~~~~~~~~
-input.y:32.3-23: error: unused value: $3 [-Werror=other]
-   32 |   { USE ($$); @$ = 3; } // Only set.
-      |   ^~~~~~~~~~~~~~~~~~~~~
-./actions.at:1641: sed 's,.*/$,,' stderr 1>&2
-./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error
-./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none
-./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none
-stderr:
-stdout:
-349. actions.at:1532:  ok
-
 stderr:
 stdout:
 ./actions.at:1057:  $PREPARSER ./input '(x)'
@@ -10898,10 +10774,7 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./actions.at:1656: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-352. actions.at:1744: testing @$ in %destructor implies %locations ...
 ./actions.at:1057:  $PREPARSER ./input '!'
-./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
 sending: '!' (0@0-9)
 sending: END (1@10-19)
@@ -10925,7 +10798,6 @@
 ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1057:  $PREPARSER ./input '(y)'
 stderr:
-stderr:
 sending: '(' (0@0-9)
 sending: 'y' (1@10-19)
 10.10-19.18: syntax error, unexpected 'y', expecting 'x'
@@ -10939,10 +10811,7 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./actions.at:1744: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./actions.at:1057:  $PREPARSER ./input '(xxxxx)(x)(x)y'
-351. actions.at:1743:  ok
 stderr:
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
@@ -10980,7 +10849,6 @@
 Freeing token 'y' (13@130-139)
 Parsing FAILED.
 ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
 ./actions.at:1057:  $PREPARSER ./input '(x)(x)x'
 stderr:
 sending: '(' (0@0-9)
@@ -11004,10 +10872,139 @@
 Parsing FAILED.
 ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 342. actions.at:1057:  ok
-353. actions.at:1745: testing @$ in %printer implies %locations ...
-./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 
+350. actions.at:1596: testing Default %printer and %destructor for midrule values ...
+./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+stderr:
+stdout:
+./actions.at:1416:  $PREPARSER ./input0 --debug
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reducing stack by rule 1 (line 49):
+-> $$ = nterm start (1.1: <> for 'S' @ 1)
+Entering state 1
+Stack now 0 1
+Reading a token
+Now at end of input.
+Shifting token END (1.1: <> for 'E' @ 1)
+Entering state 2
+Stack now 0 1 2
+Stack now 0 1 2
+Cleanup: popping token END (1.1: <> for 'E' @ 1)
+Cleanup: popping nterm start (1.1: <> for 'S' @ 1)
+./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input1.c input1.y
+./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Werror
+./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input1.c input1.y -Werror
+stderr:
+input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other]
+input.y:24.57-59: error: useless %printer for type <*> [-Werror=other]
+input.y:33.3-23: error: unset value: $$ [-Werror=other]
+input.y:32.3-23: error: unused value: $3 [-Werror=other]
+./actions.at:1634: sed 's,.*/$,,' stderr 1>&2
+stderr:
+input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other]
+input1.y:30.3-4: error: useless %printer for type <> [-Werror=other]
+./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=error
+./actions.at:1417: sed 's,.*/$,,' stderr 1>&2
+./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input1.c input1.y --warnings=error
+./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none
+./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none
+./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none
+./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none
+stderr:
+stdout:
+349. actions.at:1532:  ok
+
+stderr:
+stdout:
+./actions.at:1479:  $PREPARSER ./input --debug
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token 'a' ('a')
+Shifting token 'a' ('a')
+Entering state 1
+Stack now 0 1
+Reading a token
+Next token is token 'b' ('b')
+syntax error
+Shifting token error ()
+Entering state 3
+Stack now 0 1 3
+Next token is token 'b' ('b')
+Shifting token 'b' ('b')
+Entering state 5
+Stack now 0 1 3 5
+Reading a token
+Next token is token "invalid token" ()
+Error: popping token 'b' ('b')
+DESTROY 'b'
+Stack now 0 1 3
+Error: popping token error ()
+Stack now 0 1
+Shifting token error ()
+Entering state 3
+Stack now 0 1 3
+Next token is token "invalid token" ()
+Error: discarding token "invalid token" ()
+Error: popping token error ()
+Stack now 0 1
+Shifting token error ()
+Entering state 3
+Stack now 0 1 3
+Reading a token
+Now at end of input.
+Cleanup: discarding lookahead token "end of file" ()
+Stack now 0 1 3
+Cleanup: popping token error ()
+Cleanup: popping token 'a' ('a')
+DESTROY 'a'
+./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+348. actions.at:1429:  ok
+351. actions.at:1743: testing @$ in %initial-action implies %locations ...
+./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+
+./actions.at:1417: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input1 input1.c $LIBS
+./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o input.c input.y
+352. actions.at:1744: testing @$ in %destructor implies %locations ...
+./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./actions.at:1743: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./actions.at:1744: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Werror
+stderr:
+input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other]
+   24 | %printer    { #error "<*> printer should not be used" } <*>
+      |                                                         ^~~
+input.y:24.57-59: error: useless %printer for type <*> [-Werror=other]
+   24 | %printer    { #error "<*> printer should not be used" } <*>
+      |                                                         ^~~
+input.y:33.3-23: error: unset value: $$ [-Werror=other]
+   33 |   {           @$ = 4; } // Only used.
+      |   ^~~~~~~~~~~~~~~~~~~~~
+input.y:32.3-23: error: unused value: $3 [-Werror=other]
+   32 |   { USE ($$); @$ = 3; } // Only set.
+      |   ^~~~~~~~~~~~~~~~~~~~~
+./actions.at:1641: sed 's,.*/$,,' stderr 1>&2
+./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error
+./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none
+./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none
+./actions.at:1656: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
+stderr:
+stdout:
+352. actions.at:1744:  ok
+stdout:
+351. actions.at:1743:  ok
+
+
+353. actions.at:1745: testing @$ in %printer implies %locations ...
 354. actions.at:1856: testing Qualified $$ in actions: yacc.c ...
+./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./actions.at:1745: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./actions.at:1856: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
@@ -11038,12 +11035,6 @@
 ./actions.at:1856: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-352. actions.at:1744:  ok
-
-stderr:
-356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ...
-./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-stdout:
 ./actions.at:1657:  $PREPARSER ./input --debug
 stderr:
 Starting parse
@@ -11084,14 +11075,10 @@
 ./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 350. actions.at:1596:  ok
 
-./actions.at:1856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-357. actions.at:1856: testing Qualified $$ in actions: glr.cc ...
+356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ...
 ./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 stderr:
 stdout:
-353. actions.at:1745:  ok
-stderr:
-stdout:
 ./actions.at:1856:  $PREPARSER ./input --debug
 stderr:
 Starting parse
@@ -11122,9 +11109,10 @@
 Cleanup: popping token "end of file" ()
 Cleanup: popping nterm float (ival: 30, fval: 0.3)
 ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./actions.at:1856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-
 stderr:
+stdout:
+stderr:
+./headers.at:330: echo "xb" >>expout
 Starting parse
 Entering state 0
 Stack now 0
@@ -11153,50 +11141,25 @@
 Cleanup: popping token "end of file" ()
 Cleanup: popping nterm float (ival: 30, fval: 0.3)
 ./actions.at:1856: sed -ne '/ival:/p' stderr
+./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o xc.cc xc.y
+./actions.at:1856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 354. actions.at:1856:  ok
-358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ...
-./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-
-359. actions.at:1863: testing Destroying lookahead assigned by semantic action ...
-./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./actions.at:1906: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-stdout:
-./headers.at:329: echo "xa" >>expout
-./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o xb.cc xb.y
-./headers.at:330: $CXX $CPPFLAGS  $CXXFLAGS -c -o xb.o xb.cc 
-stderr:
-stdout:
-./actions.at:1907:  $PREPARSER ./input
-stderr:
-'b' destructor
-'a' destructor
-./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-359. actions.at:1863:  ok
 
-360. actions.at:1918: testing YYBACKUP ...
-./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./actions.at:1954: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+357. actions.at:1856: testing Qualified $$ in actions: glr.cc ...
+./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc 
 stderr:
 stdout:
-./actions.at:1955:  $PREPARSER ./input
-stderr:
-./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-360. actions.at:1918:  ok
-
-361. types.at:25: testing %union vs. api.value.type ...
-./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-361. types.at:25:  ok
+353. actions.at:1745:  ok
 
-362. types.at:44: testing %yacc vs. api.value.type=union ...
-./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-362. types.at:44:  ok
+./actions.at:1856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ...
+./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./actions.at:1856:  $PREPARSER ./input --debug
 stderr:
-
 Starting parse
 Entering state 0
 Reading a token
@@ -11244,32 +11207,21 @@
 ./actions.at:1856: sed -ne '/ival:/p' stderr
 355. actions.at:1856:  ok
 
-363. types.at:139: testing yacc.c api.value.type={double} ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-364. types.at:139: testing yacc.c api.value.type={double} %header ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-363. types.at:139:  ok
+359. actions.at:1863: testing Destroying lookahead assigned by semantic action ...
+./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./actions.at:1906: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
-
 stdout:
-./types.at:139:  $PREPARSER ./test
+./actions.at:1907:  $PREPARSER ./input
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-365. types.at:139: testing yacc.c api.value.type={variant} ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-364. types.at:139:  ok
+'b' destructor
+'a' destructor
+./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+359. actions.at:1863:  ok
 
-366. types.at:139: testing yacc.c api.value.type={variant} %header ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+360. actions.at:1918: testing YYBACKUP ...
+./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./actions.at:1954: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./actions.at:1856:  $PREPARSER ./input --debug
@@ -11333,156 +11285,21 @@
 ./actions.at:1856: sed -ne '/ival:/p' stderr
 356. actions.at:1856:  ok
 
-367. types.at:139: testing yacc.c api.value.type={struct foo} ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-366. types.at:139:  ok
-
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-368. types.at:139: testing yacc.c api.value.type={struct foo} %header ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-365. types.at:139:  ok
-
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-369. types.at:139: testing yacc.c api.value.type={struct bar} ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-stderr:
-stdout:
-./actions.at:1856:  $PREPARSER ./input --debug
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token UNTYPED (ival: 10, fval: 0.1)
-Shifting token UNTYPED (ival: 10, fval: 0.1)
-Entering state 1
-Reading a token
-Next token is token INT (ival: 20, fval: 0.2)
-Shifting token INT (ival: 20, fval: 0.2)
-Entering state 3
-Reducing stack 0 by rule 1 (line 55):
-   $1 = token UNTYPED (ival: 10, fval: 0.1)
-   $2 = token INT (ival: 20, fval: 0.2)
--> $$ = nterm float (ival: 30, fval: 0.3)
-Entering state 2
-Reading a token
-Now at end of input.
-Shifting token "end of file" ()
-Entering state 4
-Cleanup: popping token "end of file" ()
-Cleanup: popping nterm float (ival: 30, fval: 0.3)
-./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token UNTYPED (ival: 10, fval: 0.1)
-Shifting token UNTYPED (ival: 10, fval: 0.1)
-Entering state 1
-Reading a token
-Next token is token INT (ival: 20, fval: 0.2)
-Shifting token INT (ival: 20, fval: 0.2)
-Entering state 3
-Reducing stack 0 by rule 1 (line 55):
-   $1 = token UNTYPED (ival: 10, fval: 0.1)
-   $2 = token INT (ival: 20, fval: 0.2)
--> $$ = nterm float (ival: 30, fval: 0.3)
-Entering state 2
-Reading a token
-Now at end of input.
-Shifting token "end of file" ()
-Entering state 4
-Cleanup: popping token "end of file" ()
-Cleanup: popping nterm float (ival: 30, fval: 0.3)
-./actions.at:1856: sed -ne '/ival:/p' stderr
-357. actions.at:1856:  ok
-
-370. types.at:139: testing yacc.c api.value.type={struct bar} %header ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-stderr:
-stdout:
-./headers.at:330: echo "xb" >>expout
-./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o xc.cc xc.y
-./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc 
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-367. types.at:139:  ok
-
-371. types.at:139: testing yacc.c api.value.type={union foo} ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-368. types.at:139:  ok
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-369. types.at:139:  ok
-
-372. types.at:139: testing yacc.c api.value.type={union foo} %header ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-373. types.at:139: testing yacc.c %union { float fval; int ival; }; ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-370. types.at:139:  ok
-
-374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-373. types.at:139:  ok
-
-375. types.at:139: testing yacc.c %union foo { float fval; int ival; }; ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
+361. types.at:25: testing %union vs. api.value.type ...
+./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 stderr:
 stdout:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-./types.at:139:  $PREPARSER ./test
-371. types.at:139:  ok
+./actions.at:1955:  $PREPARSER ./input
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-372. types.at:139:  ok
+./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+360. actions.at:1918:  ok
+361. types.at:25:  ok
 
 
 stderr:
-376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ...
+362. types.at:44: testing %yacc vs. api.value.type=union ...
 stdout:
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 ./actions.at:1059:  $PREPARSER ./input '(x)'
 stderr:
 sending: '(' (0@0-9)
@@ -11497,7 +11314,7 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ...
+363. types.at:139: testing yacc.c api.value.type={double} ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 ./actions.at:1059:  $PREPARSER ./input '!'
 stderr:
@@ -11521,8 +11338,10 @@
 Freeing nterm input (5@0-29)
 Successful parse.
 ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 ./actions.at:1059:  $PREPARSER ./input '(y)'
 stderr:
+362. types.at:44:  ok
 sending: '(' (0@0-9)
 sending: 'y' (1@10-19)
 10.10-19.18: syntax error, unexpected 'y', expecting 'x'
@@ -11536,10 +11355,10 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+
 ./actions.at:1059:  $PREPARSER ./input '(xxxxx)(x)(x)y'
 stderr:
+stderr:
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -11576,8 +11395,56 @@
 Freeing token 'y' (13@130-139)
 Parsing FAILED.
 ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./actions.at:1856:  $PREPARSER ./input --debug
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token UNTYPED (ival: 10, fval: 0.1)
+Shifting token UNTYPED (ival: 10, fval: 0.1)
+Entering state 1
+Reading a token
+Next token is token INT (ival: 20, fval: 0.2)
+Shifting token INT (ival: 20, fval: 0.2)
+Entering state 3
+Reducing stack 0 by rule 1 (line 55):
+   $1 = token UNTYPED (ival: 10, fval: 0.1)
+   $2 = token INT (ival: 20, fval: 0.2)
+-> $$ = nterm float (ival: 30, fval: 0.3)
+Entering state 2
+Reading a token
+Now at end of input.
+Shifting token "end of file" ()
+Entering state 4
+Cleanup: popping token "end of file" ()
+Cleanup: popping nterm float (ival: 30, fval: 0.3)
+./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1059:  $PREPARSER ./input '(x)(x)x'
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token UNTYPED (ival: 10, fval: 0.1)
+Shifting token UNTYPED (ival: 10, fval: 0.1)
+Entering state 1
+Reading a token
+Next token is token INT (ival: 20, fval: 0.2)
+Shifting token INT (ival: 20, fval: 0.2)
+Entering state 3
+Reducing stack 0 by rule 1 (line 55):
+   $1 = token UNTYPED (ival: 10, fval: 0.1)
+   $2 = token INT (ival: 20, fval: 0.2)
+-> $$ = nterm float (ival: 30, fval: 0.3)
+Entering state 2
+Reading a token
+Now at end of input.
+Shifting token "end of file" ()
+Entering state 4
+Cleanup: popping token "end of file" ()
+Cleanup: popping nterm float (ival: 30, fval: 0.3)
+./actions.at:1856: sed -ne '/ival:/p' stderr
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
 thing (1@10-19): 'x' (1@10-19)
@@ -11598,21 +11465,16 @@
 Freeing token END (7@70-79)
 Parsing FAILED.
 ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-343. actions.at:1059:  ok
-
-378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ...
+364. types.at:139: testing yacc.c api.value.type={double} %header ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+357. actions.at:1856:  ok
+343. actions.at:1059:  ok
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-374. types.at:139:  ok
+
+
 stderr:
 stdout:
 ./actions.at:1060:  $PREPARSER ./input '(x)'
-
 stderr:
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
@@ -11626,6 +11488,10 @@
 Freeing nterm input (2@0-29)
 Successful parse.
 ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+366. types.at:139: testing yacc.c api.value.type={variant} %header ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+365. types.at:139: testing yacc.c api.value.type={variant} ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 ./actions.at:1060:  $PREPARSER ./input '!'
 stderr:
 sending: '!' (0@0-9)
@@ -11637,8 +11503,6 @@
 Successful parse.
 ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1060:  $PREPARSER ./input '!!!'
-379. types.at:139: testing yacc.c api.value.type=union ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 stderr:
 sending: '!' (0@0-9)
 sending: '!' (1@10-19)
@@ -11650,6 +11514,8 @@
 Freeing nterm input (5@0-29)
 Successful parse.
 ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 ./actions.at:1060:  $PREPARSER ./input '(y)'
 stderr:
 sending: '(' (0@0-9)
@@ -11666,7 +11532,6 @@
 Successful parse.
 ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./actions.at:1060:  $PREPARSER ./input '(xxxxx)(x)(x)y'
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
 sending: '(' (0@0-9)
 sending: 'x' (1@10-19)
@@ -11728,69 +11593,128 @@
 ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 344. actions.at:1060:  ok
 
-380. types.at:139: testing yacc.c api.value.type=union %header ...
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+367. types.at:139: testing yacc.c api.value.type={struct foo} ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+363. types.at:139:  ok
+
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
+368. types.at:139: testing yacc.c api.value.type={struct foo} %header ...
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-375. types.at:139:  ok
+364. types.at:139:  ok
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 
-381. types.at:139: testing glr.c api.value.type={double} ...
+369. types.at:139: testing yacc.c api.value.type={struct bar} ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-377. types.at:139:  ok
+366. types.at:139:  ok
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 
-382. types.at:139: testing glr.c api.value.type={double} %header ...
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+365. types.at:139:  ok
+370. types.at:139: testing yacc.c api.value.type={struct bar} %header ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+
+371. types.at:139: testing yacc.c api.value.type={union foo} ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-376. types.at:139:  ok
+367. types.at:139:  ok
 
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-383. types.at:139: testing glr.c api.value.type={variant} ...
+372. types.at:139: testing yacc.c api.value.type={union foo} %header ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 stderr:
 stdout:
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-378. types.at:139:  ok
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+368. types.at:139:  ok
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
 
-384. types.at:139: testing glr.c api.value.type={variant} %header ...
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+369. types.at:139:  ok
+373. types.at:139: testing yacc.c %union { float fval; int ival; }; ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-379. types.at:139:  ok
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+stderr:
+371. types.at:139:  ok
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+370. types.at:139:  ok
 
-385. types.at:139: testing glr.c api.value.type={struct foo} ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 stderr:
 stdout:
+375. types.at:139: testing yacc.c %union foo { float fval; int ival; }; ...
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 stderr:
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ...
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-380. types.at:139:  ok
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+372. types.at:139:  ok
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 
-386. types.at:139: testing glr.c api.value.type={struct foo} %header ...
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+stderr:
+stdout:
+./headers.at:331: echo "xc" >>expout
+./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o xd.cc xd.y
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc 
+stderr:
+stdout:
+stderr:
+./types.at:139:  $PREPARSER ./test
+stdout:
+stderr:
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+373. types.at:139:  ok
+374. types.at:139:  ok
+
+
 stderr:
 stdout:
 ./actions.at:1856:  $PREPARSER ./input --debug
@@ -11841,30 +11765,76 @@
 Cleanup: popping nterm float (ival: 30, fval: 0.3)
 ./actions.at:1856: sed -ne '/ival:/p' stderr
 358. actions.at:1856:  ok
-stderr:
+378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+379. types.at:139: testing yacc.c api.value.type=union ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 
+380. types.at:139: testing yacc.c api.value.type=union %header ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-382. types.at:139:  ok
-387. types.at:139: testing glr.c api.value.type={struct bar} ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-
+stderr:
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-388. types.at:139: testing glr.c api.value.type={struct bar} %header ...
+375. types.at:139:  ok
 stderr:
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+stdout:
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-381. types.at:139:  ok
+./types.at:139:  $PREPARSER ./test
+377. types.at:139:  ok
+stderr:
+
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+376. types.at:139:  ok
+
+
+381. types.at:139: testing glr.c api.value.type={double} ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+382. types.at:139: testing glr.c api.value.type={double} %header ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+383. types.at:139: testing glr.c api.value.type={variant} ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+stderr:
+stderr:
+stdout:
+stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139:  $PREPARSER ./test
+./types.at:139:  $PREPARSER ./test
+stderr:
+stderr:
+378. types.at:139:  ok
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+379. types.at:139:  ok
+380. types.at:139:  ok
 
-389. types.at:139: testing glr.c api.value.type={union foo} ...
+
+
+385. types.at:139: testing glr.c api.value.type={struct foo} ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+384. types.at:139: testing glr.c api.value.type={variant} %header ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+386. types.at:139: testing glr.c api.value.type={struct foo} %header ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -11872,37 +11842,119 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 383. types.at:139:  ok
 
-390. types.at:139: testing glr.c api.value.type={union foo} %header ...
+387. types.at:139: testing glr.c api.value.type={struct bar} ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+stderr:
+input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr]
+input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr]
+input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
+  First example            . c A A $end
+  First reduce derivation  $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ]
+  Second example           . c A A $end
+  Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ]
+input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
+time limit exceeded: 6.000000
+  First example            b . c A A $end
+  First reduce derivation  $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ]
+  Second example           b . A $end
+  Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ]
+input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
+time limit exceeded: 6.000000
+  First example            c . c A A $end
+  First reduce derivation  $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ]
+  Second example           c . A $end
+  Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ]
+input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
+time limit exceeded: 6.000000
+  First example     b c . A
+  Shift derivation  a -> [ b d -> [ c . A ] ]
+  Second example    b c . c A A $end
+  Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ]
+input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples]
+  First example            b c . c A A $end
+  First reduce derivation  $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ]
+  Second example           b c . A $end
+  Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ]
+input.y: warning: shift/reduce conflict on token A [-Wcounterexamples]
+  First example     b c . A
+  Shift derivation  a -> [ b d -> [ c . A ] ]
+  Second example    b c . A $end
+  Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ]
+input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
+  Example                  b d .
+  First reduce derivation  a -> [ b d . ]
+  Second reduce derivation a -> [ b d -> [ d . ] ]
+input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples]
+  Example                  c d .
+  First reduce derivation  a -> [ c d . ]
+  Second reduce derivation a -> [ c d -> [ d . ] ]
+input.y:5.4: warning: rule useless in parser due to conflicts [-Wother]
+input.y:6.15: warning: rule useless in parser due to conflicts [-Wother]
+./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+382. types.at:139:  ok
+./types.at:139:  $PREPARSER ./test
+270. counterexample.at:610:  ok
+stderr:
+stderr:
+stdout:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139:  $PREPARSER ./test
 384. types.at:139:  ok
+stderr:
+
+
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+381. types.at:139:  ok
+389. types.at:139: testing glr.c api.value.type={union foo} ...
+388. types.at:139: testing glr.c api.value.type={struct bar} %header ...
+
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 391. types.at:139: testing glr.c %union { float fval; int ival; }; ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+386. types.at:139:  ok
+390. types.at:139: testing glr.c api.value.type={union foo} %header ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 385. types.at:139:  ok
-
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+
+393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-386. types.at:139:  ok
+387. types.at:139:  ok
 
-393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ...
+394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
@@ -11910,123 +11962,168 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-387. types.at:139:  ok
+389. types.at:139:  ok
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-388. types.at:139:  ok
 stderr:
-
-394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 stdout:
 ./types.at:139:  $PREPARSER ./test
+
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+393. types.at:139:  ok
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-389. types.at:139:  ok
+390. types.at:139:  ok
+
 395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ...
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
+397. types.at:139: testing glr.c api.value.type=union ...
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 ./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-391. types.at:139:  ok
-
+388. types.at:139:  ok
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-397. types.at:139: testing glr.c api.value.type=union ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-390. types.at:139:  ok
 
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
-398. types.at:139: testing glr.c api.value.type=union %header ...
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
-./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
+391. types.at:139:  ok
 ./types.at:139:  $PREPARSER ./test
 stderr:
+398. types.at:139: testing glr.c api.value.type=union %header ...
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-392. types.at:139:  ok
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.c test.y
 
+392. types.at:139:  ok
 399. types.at:139: testing lalr1.cc api.value.type={double} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-393. types.at:139:  ok
+./types.at:139: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o test test.c $LIBS
 
 400. types.at:139: testing lalr1.cc api.value.type={double} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./headers.at:331: echo "xc" >>expout
-./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -d -o xd.cc xd.y
-./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc 
+./headers.at:332: echo "xd" >>expout
+./headers.at:342: "$PERL" -n -0777 -e '
+  # Ignore comments.
+  s{/\*.*?\*/}{}gs;
+  s{//.*}{}g;
+  # Ignore warnings.
+  s{# *pragma .* message ".*"}{}g;
+
+  s{\b((defined|if)\ YYDEBUG
+      |YYChar     # Template parameter.
+      |YYNTOKENS  # This is actually scoped in a C++ class.
+      |YYPUSH_MORE(?:_DEFINED)?
+      |S_(YY(ACCEPT|EMPTY|EOF|error|UNDEF))  # These guys are scoped.
+      |YY(?:_REINTERPRET)?_CAST
+      |YY_ATTRIBUTE(?:_PURE|_UNUSED)
+      |YY_CONSTEXPR
+      |YY_COPY
+      |YY_CPLUSPLUS
+      |YY_IGNORE_(?:MAYBE_UNINITIALIZED|USELESS_CAST)_(?:BEGIN|END)
+      |YY_INITIAL_VALUE
+      |YY_MOVE
+      |YY_MOVE_OR_COPY
+      |YY_MOVE_REF
+      |YY_NOEXCEPT
+      |YY_NOTHROW
+      |YY_NULLPTR
+      |YY_RVREF
+      |YY_USE
+      |YY_\w+_INCLUDED  # Header guards.
+      |FILE\ \*yyo      # Function argument.
+      |const\ yylocp    # Function argument.
+      )\b}{}gx;
+  while (/^(.*YY.*)$/gm)
+  {
+    print "$ARGV: invalid exported YY: $1\n";
+  }
+  if ($ARGV =~ /\.h$/)
+  {
+    while (/^(.*yy.*)$/gm)
+    {
+      print "$ARGV: invalid exported yy: $1\n";
+    }
+  }
+' -- *.hh *.h
+./headers.at:387: $CC $CFLAGS $CPPFLAGS  -c -o c-only.o c-only.c 
+stderr:
+stdout:
+./headers.at:387: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx-only.o cxx-only.cc 
+stderr:
+stdout:
+./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx ||
+          exit 77
+stderr:
+stdout:
+./headers.at:387:  $PREPARSER ./c-and-cxx
+stderr:
+./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS
 stderr:
 stdout:
+stderr:
+./types.at:139:  $PREPARSER ./test
+stdout:
+stderr:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-395. types.at:139:  ok
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+395. types.at:139:  ok
 stdout:
+394. types.at:139:  ok
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 
-394. types.at:139:  ok
+stderr:
 
-401. types.at:139: testing lalr1.cc api.value.type={variant} ...
+stdout:
+./types.at:139:  $PREPARSER ./test
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+402. types.at:139: testing lalr1.cc api.value.type={variant} %header ...
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-402. types.at:139: testing lalr1.cc api.value.type={variant} %header ...
+396. types.at:139:  ok
+401. types.at:139: testing lalr1.cc api.value.type={variant} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+397. types.at:139:  ok
+
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-396. types.at:139:  ok
 
 403. types.at:139: testing lalr1.cc api.value.type={struct foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-397. types.at:139:  ok
-
 404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
@@ -12049,15 +12146,18 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+stdout:
+./headers.at:394:  $PREPARSER ./parser
+stderr:
+stderr:
+./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+310. headers.at:199:  ok
+
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -12100,12 +12200,12 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -12116,10 +12216,10 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
@@ -12132,7 +12232,6 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12141,6 +12240,7 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12151,72 +12251,20 @@
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./headers.at:332: echo "xd" >>expout
-./headers.at:342: "$PERL" -n -0777 -e '
-  # Ignore comments.
-  s{/\*.*?\*/}{}gs;
-  s{//.*}{}g;
-  # Ignore warnings.
-  s{# *pragma .* message ".*"}{}g;
-
-  s{\b((defined|if)\ YYDEBUG
-      |YYChar     # Template parameter.
-      |YYNTOKENS  # This is actually scoped in a C++ class.
-      |YYPUSH_MORE(?:_DEFINED)?
-      |S_(YY(ACCEPT|EMPTY|EOF|error|UNDEF))  # These guys are scoped.
-      |YY(?:_REINTERPRET)?_CAST
-      |YY_ATTRIBUTE(?:_PURE|_UNUSED)
-      |YY_CONSTEXPR
-      |YY_COPY
-      |YY_CPLUSPLUS
-      |YY_IGNORE_(?:MAYBE_UNINITIALIZED|USELESS_CAST)_(?:BEGIN|END)
-      |YY_INITIAL_VALUE
-      |YY_MOVE
-      |YY_MOVE_OR_COPY
-      |YY_MOVE_REF
-      |YY_NOEXCEPT
-      |YY_NOTHROW
-      |YY_NULLPTR
-      |YY_RVREF
-      |YY_USE
-      |YY_\w+_INCLUDED  # Header guards.
-      |FILE\ \*yyo      # Function argument.
-      |const\ yylocp    # Function argument.
-      )\b}{}gx;
-  while (/^(.*YY.*)$/gm)
-  {
-    print "$ARGV: invalid exported YY: $1\n";
-  }
-  if ($ARGV =~ /\.h$/)
-  {
-    while (/^(.*yy.*)$/gm)
-    {
-      print "$ARGV: invalid exported yy: $1\n";
-    }
-  }
-' -- *.hh *.h
-./headers.at:387: $CC $CFLAGS $CPPFLAGS  -c -o c-only.o c-only.c 
+./types.at:139:  $PREPARSER ./test
 stderr:
-stdout:
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stdout:
-./headers.at:387: $CXX $CPPFLAGS  $CXXFLAGS -c -o cxx-only.o cxx-only.cc 
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
-stdout:
-./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx ||
-          exit 77
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./headers.at:387:  $PREPARSER ./c-and-cxx
-stderr:
-./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12234,32 +12282,21 @@
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
+stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./headers.at:394:  $PREPARSER ./parser
-stderr:
-./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-310. headers.at:199:  ok
-
-406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ...
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12270,8 +12307,8 @@
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-stderr:
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
@@ -12286,12 +12323,20 @@
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139:  $PREPARSER ./test
+stderr:
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12376,19 +12421,18 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
@@ -12401,6 +12445,7 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -12411,20 +12456,20 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -12529,10 +12574,12 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+400. types.at:139:  ok
+
+stderr:
+407. types.at:139: testing lalr1.cc api.value.type={union foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
@@ -12540,22 +12587,20 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-400. types.at:139:  ok
-
-407. types.at:139: testing lalr1.cc api.value.type={union foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 399. types.at:139:  ok
 
 408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ...
@@ -12567,9 +12612,6 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-402. types.at:139:  ok
-
-409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -12578,34 +12620,42 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-401. types.at:139:  ok
+403. types.at:139:  ok
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+404. types.at:139:  ok
 
+409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ...
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-403. types.at:139:  ok
-
-411. types.at:139: testing lalr1.cc api.value.type=union ...
+stderr:
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-404. types.at:139:  ok
+402. types.at:139:  ok
 
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-412. types.at:139: testing lalr1.cc api.value.type=union %header ...
+411. types.at:139: testing lalr1.cc api.value.type=union ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
@@ -12613,7 +12663,7 @@
 405. types.at:139:  ok
 
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-413. types.at:139: testing lalr1.cc api.value.type=variant ...
+412. types.at:139: testing lalr1.cc api.value.type=union %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -12622,22 +12672,28 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+401. types.at:139:  ok
+
 stderr:
 stdout:
+413. types.at:139: testing lalr1.cc api.value.type=variant ...
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+406. types.at:139:  ok
+
+414. types.at:139: testing lalr1.cc api.value.type=variant %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -12656,15 +12712,23 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+stderr:
+stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139:  $PREPARSER ./test
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12718,33 +12782,33 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+stderr:
+stdout:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12760,42 +12824,39 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-406. types.at:139:  ok
-
-414. types.at:139: testing lalr1.cc api.value.type=variant %header ...
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12819,20 +12880,27 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
 stderr:
 stdout:
+stdout:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139:  $PREPARSER ./test
+./types.at:139:  $PREPARSER ./test
+stderr:
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
@@ -12844,6 +12912,7 @@
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12865,10 +12934,11 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -12876,12 +12946,19 @@
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139:  $PREPARSER ./test
+stderr:
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -12899,12 +12976,12 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -12928,13 +13005,6 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-stderr:
-stdout:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139:  $PREPARSER ./test
-stderr:
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
@@ -12947,23 +13017,22 @@
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
 stdout:
+stderr:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -12971,23 +13040,23 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13003,20 +13072,23 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139:  $PREPARSER ./test
 stderr:
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+407. types.at:139:  ok
+
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+415. types.at:139: testing lalr1.cc api.value.type=variant ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -13049,6 +13121,9 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+408. types.at:139:  ok
+
+416. types.at:139: testing lalr1.cc api.value.type=variant %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -13065,43 +13140,36 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-407. types.at:139:  ok
-
+409. types.at:139:  ok
 stderr:
 stdout:
-415. types.at:139: testing lalr1.cc api.value.type=variant ...
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-408. types.at:139:  ok
+410. types.at:139:  ok
 
-416. types.at:139: testing lalr1.cc api.value.type=variant %header ...
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-409. types.at:139:  ok
 
+411. types.at:139:  ok
 417. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ...
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+
+419. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant ...
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-410. types.at:139:  ok
-
-418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ...
-======== Testing with C++ standard flags: ''
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13109,14 +13177,7 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-411. types.at:139:  ok
-
-419. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant ...
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13125,9 +13186,17 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 412. types.at:139:  ok
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 420. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant %header ...
 ======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
@@ -13157,23 +13226,15 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
-stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13197,15 +13258,41 @@
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139: ./check
+-std=c++03 not supported
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+413. types.at:139:  ok
+
+421. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant api.token.constructor ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
--std=c++03 not supported
+-std=c++11 not supported
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -13217,7 +13304,7 @@
 stderr:
 stdout:
 ./types.at:139: ./check
--std=c++11 not supported
+-std=c++03 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -13237,7 +13324,8 @@
 stderr:
 stdout:
 ./types.at:139: ./check
--std=c++03 not supported
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+-std=c++11 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -13245,29 +13333,25 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++11 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-stdout:
 ./types.at:139:  $PREPARSER ./test
-./types.at:139: ./check
 stderr:
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-413. types.at:139:  ok
+414. types.at:139:  ok
 
-421. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant api.token.constructor ...
+422. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant api.token.constructor %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -13296,21 +13380,47 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stdout:
+stderr:
+./types.at:139:  $PREPARSER ./test
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+======== Testing with C++ standard flags: ''
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
+-std=c++98 not supported
 ======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
+./types.at:139: ./check
+-std=c++03 not supported
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./types.at:139: ./check
+-std=c++11 not supported
 ======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13319,10 +13429,6 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13366,45 +13472,29 @@
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++11 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: ./check
+-std=c++11 not supported
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13418,10 +13508,10 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
@@ -13430,29 +13520,26 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-414. types.at:139:  ok
-
-422. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant api.token.constructor %header ...
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139: ./check
+======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
@@ -13460,16 +13547,12 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
@@ -13481,21 +13564,17 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13506,88 +13585,78 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
+stderr:
+stdout:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: ./check
 stderr:
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++98 not supported
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
-stderr:
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++03 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139:  $PREPARSER ./test
 stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stdout:
-./types.at:139: ./check
+======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: ./check
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
--std=c++11 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13598,9 +13667,19 @@
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
+stderr:
+./types.at:139:  $PREPARSER ./test
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+420. types.at:139:  ok
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+
+423. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant api.token.constructor ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -13609,27 +13688,18 @@
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
-stderr:
 stdout:
-stdout:
-./types.at:139:  $PREPARSER ./test
 ./types.at:139:  $PREPARSER ./test
 stderr:
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13638,10 +13708,12 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13649,15 +13721,13 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-420. types.at:139:  ok
-
-423. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant api.token.constructor ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13665,7 +13735,6 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 419. types.at:139:  ok
 
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 424. types.at:139: testing lalr1.cc %code requires { #include <memory> } api.value.type=variant api.token.constructor %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
@@ -13679,7 +13748,7 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-417. types.at:139:  ok
+418. types.at:139:  ok
 
 425. types.at:139: testing glr.cc api.value.type={double} ...
 ======== Testing with C++ standard flags: ''
@@ -13690,53 +13759,80 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-418. types.at:139:  ok
-
-426. types.at:139: testing glr.cc api.value.type={double} %header ...
 ======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
+-std=c++98 not supported
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
+-std=c++03 not supported
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+417. types.at:139:  ok
+
+426. types.at:139: testing glr.cc api.value.type={double} %header ...
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
+stdout:
+./types.at:139: ./check
+-std=c++11 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+421. types.at:139:  ok
+
+427. types.at:139: testing glr.cc api.value.type={variant} ...
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-stderr:
 ./types.at:139:  $PREPARSER ./test
-stdout:
 stderr:
-./types.at:139:  $PREPARSER ./test
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+415. types.at:139:  ok
+
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+428. types.at:139: testing glr.cc api.value.type={variant} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++98 not supported
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13744,39 +13840,33 @@
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+422. types.at:139:  ok
 ======== Testing with C++ standard flags: ''
-421. types.at:139:  ok
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 
-stderr:
-stdout:
-./types.at:139: ./check
-427. types.at:139: testing glr.cc api.value.type={variant} ...
+429. types.at:139: testing glr.cc api.value.type={struct foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
--std=c++03 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 -std=c++98 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
--std=c++11 not supported
+-std=c++03 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
--std=c++03 not supported
+-std=c++11 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -13786,13 +13876,10 @@
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++11 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139:  $PREPARSER ./test
 stderr:
-stdout:
-./types.at:139: ./check
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13809,8 +13896,7 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13818,24 +13904,28 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+416. types.at:139:  ok
+
+430. types.at:139: testing glr.cc api.value.type={struct foo} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -13845,23 +13935,16 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
-416. types.at:139:  ok
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-
-428. types.at:139: testing glr.cc api.value.type={variant} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13870,17 +13953,14 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
-stderr:
-stdout:
 stdout:
+stderr:
 ./types.at:139: ./check
-./types.at:139:  $PREPARSER ./test
+stdout:
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-415. types.at:139:  ok
-
-429. types.at:139: testing glr.cc api.value.type={struct foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -13892,11 +13972,11 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
@@ -13908,29 +13988,25 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13939,22 +14015,14 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -13962,12 +14030,8 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-422. types.at:139:  ok
-
-430. types.at:139: testing glr.cc api.value.type={struct foo} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13976,14 +14040,14 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -13994,6 +14058,11 @@
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -14030,7 +14099,8 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14042,28 +14112,15 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14078,6 +14135,10 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -14103,6 +14164,7 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14111,7 +14173,6 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14135,29 +14196,25 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 423. types.at:139:  ok
 
+431. types.at:139: testing glr.cc api.value.type={struct bar} ...
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
-431. types.at:139: testing glr.cc api.value.type={struct bar} ...
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-424. types.at:139:  ok
-
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-432. types.at:139: testing glr.cc api.value.type={struct bar} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14166,11 +14223,15 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+424. types.at:139:  ok
+
+432. types.at:139: testing glr.cc api.value.type={struct bar} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -14202,18 +14263,21 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+425. types.at:139:  ok
+
+433. types.at:139: testing glr.cc api.value.type={union foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14245,26 +14309,26 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
@@ -14275,9 +14339,6 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-425. types.at:139:  ok
-
-433. types.at:139: testing glr.cc api.value.type={union foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -14286,18 +14347,18 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-426. types.at:139:  ok
+428. types.at:139:  ok
 
 434. types.at:139: testing glr.cc api.value.type={union foo} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-427. types.at:139:  ok
+426. types.at:139:  ok
 
 435. types.at:139: testing glr.cc %union { float fval; int ival; }; ...
 ======== Testing with C++ standard flags: ''
@@ -14307,10 +14368,10 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14318,6 +14379,7 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14326,20 +14388,33 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+427. types.at:139:  ok
+
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
 stderr:
 stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139:  $PREPARSER ./test
 stderr:
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+429. types.at:139:  ok
+
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+437. types.at:139: testing glr.cc api.value.type=union ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -14387,18 +14462,18 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+430. types.at:139:  ok
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-429. types.at:139:  ok
 
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ...
+438. types.at:139: testing glr.cc api.value.type=union %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -14407,9 +14482,6 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-428. types.at:139:  ok
-
-437. types.at:139: testing glr.cc api.value.type=union ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -14452,30 +14524,22 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
-stderr:
 stdout:
-stdout:
-./types.at:139:  $PREPARSER ./test
 ./types.at:139:  $PREPARSER ./test
 stderr:
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
@@ -14490,9 +14554,6 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-430. types.at:139:  ok
-
-438. types.at:139: testing glr.cc api.value.type=union %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -14526,19 +14587,19 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 ./types.at:139:  $PREPARSER ./test
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
@@ -14559,7 +14620,6 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14568,22 +14628,22 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14592,6 +14652,7 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14604,26 +14665,26 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
 stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139:  $PREPARSER ./test
 stderr:
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14639,23 +14700,23 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14684,8 +14745,17 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+======== Testing with C++ standard flags: ''
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14693,7 +14763,6 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 431. types.at:139:  ok
 
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 440. types.at:139: testing glr2.cc api.value.type={double} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
@@ -14714,23 +14783,15 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
@@ -14741,7 +14802,6 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14750,14 +14810,22 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+433. types.at:139:  ok
+
+441. types.at:139: testing glr2.cc api.value.type={variant} ...
 ======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14773,7 +14841,6 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14782,14 +14849,12 @@
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-433. types.at:139:  ok
-
-441. types.at:139: testing glr2.cc api.value.type={variant} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -14801,39 +14866,26 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
--std=c++98 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 ./types.at:139: ./check
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+-std=c++98 not supported
 ======== Testing with C++ standard flags: ''
--std=c++03 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+434. types.at:139:  ok
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+
+442. types.at:139: testing glr2.cc api.value.type={variant} %header ...
+======== Testing with C++ standard flags: ''
 stderr:
 stdout:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./types.at:139: ./check
 -std=c++98 not supported
 ======== Testing with C++ standard flags: ''
-stderr:
-stdout:
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -14844,37 +14896,30 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-434. types.at:139:  ok
-
-442. types.at:139: testing glr2.cc api.value.type={variant} %header ...
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: ./check
 stderr:
 stdout:
-./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: ./check
+-std=c++03 not supported
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
+./types.at:139: ./check
 stdout:
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
@@ -14890,6 +14935,7 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -14897,26 +14943,15 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-436. types.at:139:  ok
-
-444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ...
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 437. types.at:139:  ok
 
-445. types.at:139: testing glr2.cc api.value.type={struct bar} ...
+444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -14926,19 +14961,6 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -14958,20 +14980,23 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+436. types.at:139:  ok
+
+445. types.at:139: testing glr2.cc api.value.type={struct bar} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
@@ -14984,25 +15009,28 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+438. types.at:139:  ok
+
+446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
--std=c++98 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++03 not supported
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
@@ -15038,49 +15066,46 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-438. types.at:139:  ok
-
-446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+-std=c++98 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
--std=c++98 not supported
+-std=c++03 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stderr:
+stdout:
+stdout:
+./types.at:139:  $PREPARSER ./test
 ./types.at:139:  $PREPARSER ./test
 stderr:
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
--std=c++03 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 -std=c++98 not supported
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
@@ -15106,25 +15131,13 @@
 stderr:
 stdout:
 ./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+-std=c++98 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+-std=c++03 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -15141,28 +15154,28 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+stderr:
+stdout:
+./types.at:139: ./check
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -15191,10 +15204,10 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
@@ -15213,6 +15226,11 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -15222,11 +15240,6 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
@@ -15263,7 +15276,6 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -15271,6 +15283,7 @@
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -15293,12 +15306,24 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+======== Testing with C++ standard flags: ''
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
@@ -15341,13 +15366,25 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+======== Testing with C++ standard flags: ''
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+stderr:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -15381,19 +15418,31 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
+stdout:
+stderr:
+./types.at:139:  $PREPARSER ./test
+stdout:
+stderr:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-stderr:
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
@@ -15413,21 +15462,21 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-439. types.at:139:  ok
-
-447. types.at:139: testing glr2.cc api.value.type={union foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+439. types.at:139:  ok
+
+447. types.at:139: testing glr2.cc api.value.type={union foo} ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -15459,6 +15508,11 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -15471,11 +15525,6 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -15486,18 +15535,21 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+441. types.at:139:  ok
+
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ...
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -15507,9 +15559,19 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+stderr:
+stdout:
 ./types.at:139: ./check
 -std=c++98 not supported
 ======== Testing with C++ standard flags: ''
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+442. types.at:139:  ok
+
+450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ...
+======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
@@ -15521,26 +15583,28 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-441. types.at:139:  ok
-
-449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
@@ -15560,24 +15624,16 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-442. types.at:139:  ok
-
-450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -15585,9 +15641,8 @@
 
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: ./check
+-std=c++98 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 451. types.at:139: testing glr2.cc api.value.type=union ...
@@ -15595,52 +15650,28 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
+./types.at:139: ./check
+-std=c++03 not supported
+======== Testing with C++ standard flags: ''
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-445. types.at:139:  ok
-
-452. types.at:139: testing glr2.cc api.value.type=union %header ...
-======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-444. types.at:139:  ok
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-
-453. types.at:377: testing lalr1.cc: Named %union ...
-./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-453. types.at:377:  ok
-
-454. types.at:377: testing glr.cc: Named %union ...
-./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
-454. types.at:377:  ok
-
-455. scanner.at:326: testing Token numbers: yacc.c ...
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./scanner.at:326: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+444. types.at:139:  ok
+
+452. types.at:139: testing glr2.cc api.value.type=union %header ...
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -15654,18 +15685,7 @@
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-stderr:
-stdout:
-./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-./scanner.at:326:  $PREPARSER ./input
-stderr:
-./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-455. scanner.at:326:  ok
-
-456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ...
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./scanner.at:326: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -15674,36 +15694,54 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+stderr:
+stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./types.at:139: ./check
 -std=c++03 not supported
 ======== Testing with C++ standard flags: ''
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
+stderr:
+stdout:
 stdout:
 ./types.at:139:  $PREPARSER ./test
+./types.at:139: ./check
 stderr:
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
+445. types.at:139:  ok
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+453. types.at:377: testing lalr1.cc: Named %union ...
+./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+453. types.at:377:  ok
+
+454. types.at:377: testing glr.cc: Named %union ...
+./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+454. types.at:377:  ok
+
+455. scanner.at:326: testing Token numbers: yacc.c ...
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./scanner.at:326: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c
 ./scanner.at:326:  $PREPARSER ./input
 stderr:
 ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-456. scanner.at:326:  ok
+455. scanner.at:326:  ok
 
-457. scanner.at:326: testing Token numbers: glr.c ...
+456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ...
 ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./scanner.at:326: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
@@ -15711,20 +15749,29 @@
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+446. types.at:139:  ok
+
+457. scanner.at:326: testing Token numbers: glr.c ...
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./scanner.at:326: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++98 not supported
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c
+./scanner.at:326:  $PREPARSER ./input
+stderr:
+./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+456. scanner.at:326:  ok
+
 stderr:
 stdout:
-./types.at:139: ./check
--std=c++03 not supported
+458. scanner.at:326: testing Token numbers: glr.c api.token.raw ...
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./scanner.at:326: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -15737,17 +15784,17 @@
 stderr:
 ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 457. scanner.at:326:  ok
-
 stderr:
-458. scanner.at:326: testing Token numbers: glr.c api.token.raw ...
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-./scanner.at:326: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+459. scanner.at:326: testing Token numbers: lalr1.cc ...
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./scanner.at:326: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -15756,36 +15803,38 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
+./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c
+./scanner.at:326:  $PREPARSER ./input
+stderr:
+./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+458. scanner.at:326:  ok
+
+460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ...
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+stderr:
+stdout:
 ./types.at:139: ./check
 -std=c++03 not supported
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./scanner.at:326: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-446. types.at:139:  ok
-
-459. scanner.at:326: testing Token numbers: lalr1.cc ...
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./scanner.at:326: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
+======== Testing with C++ standard flags: ''
 stdout:
-./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c
-./scanner.at:326:  $PREPARSER ./input
+./types.at:139: ./check
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
-./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-458. scanner.at:326:  ok
-
-460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ...
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./scanner.at:326: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -15807,28 +15856,28 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -15836,45 +15885,33 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
 ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
 ./scanner.at:326:  $PREPARSER ./input
 stderr:
 ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-459. scanner.at:326:  ok
+460. scanner.at:326:  ok
 
 461. scanner.at:326: testing Token numbers: glr.cc ...
 ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./scanner.at:326: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
 ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
+./scanner.at:326: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./scanner.at:326:  $PREPARSER ./input
 stderr:
 ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-460. scanner.at:326:  ok
+459. scanner.at:326:  ok
 
-stderr:
 462. scanner.at:326: testing Token numbers: glr.cc api.token.raw ...
-stdout:
 ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./scanner.at:326: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-./scanner.at:326: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./types.at:139: ./check
@@ -15884,36 +15921,48 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
+stderr:
+stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139:  $PREPARSER ./test
+stderr:
+======== Testing with C++ standard flags: ''
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 stderr:
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
 ./scanner.at:326:  $PREPARSER ./input
 stderr:
-./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-461. scanner.at:326:  ok
-
-463. scanner.at:326: testing Token numbers: glr2.cc ...
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
 stdout:
 ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
+stderr:
+./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+461. scanner.at:326:  ok
 ./scanner.at:326:  $PREPARSER ./input
 stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+
 462. scanner.at:326:  ok
+463. scanner.at:326: testing Token numbers: glr2.cc ...
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 
 464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ...
 ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 ./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
@@ -15925,19 +15974,12 @@
 stdout:
 ./types.at:139: ./check
 ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
@@ -15947,11 +15989,6 @@
 ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
 ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -15978,56 +16015,79 @@
 stdout:
 ./types.at:139:  $PREPARSER ./test
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
 ./scanner.at:326:  $PREPARSER ./input
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
 ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 463. scanner.at:326:  ok
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 
 465. scanner.at:326: testing Token numbers: lalr1.d ...
 ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.d input.y
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 465. scanner.at:326:  skipped (scanner.at:326)
 
 466. scanner.at:326: testing Token numbers: lalr1.d api.token.raw ...
 ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.d input.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 466. scanner.at:326:  skipped (scanner.at:326)
+stderr:
+stdout:
+./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
 
+./scanner.at:326:  $PREPARSER ./input
+stderr:
+./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+464. scanner.at:326:  ok
 467. scanner.at:326: testing Token numbers: lalr1.java ...
 ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.java input.y
-467. scanner.at:326:  skipped (scanner.at:326)
 
 468. scanner.at:326: testing Token numbers: lalr1.java api.token.raw ...
 ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.java input.y
-468. scanner.at:326:  skipped (scanner.at:326)
+467. scanner.at:326:  skipped (scanner.at:326)
 
+468. scanner.at:326:  skipped (scanner.at:326)
 469. scanner.at:330: testing Token numbers: lalr1.cc api.token.raw api.value.type=variant api.token.constructor ...
 ./scanner.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-stderr:
-stdout:
-./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
-./scanner.at:326:  $PREPARSER ./input
-stderr:
-./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-464. scanner.at:326:  ok
 
-./scanner.at:330: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 470. calc.at:1334: testing Calculator parse.trace  ...
 ./calc.at:1334: mv calc.y.tmp calc.y
 
 ./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./scanner.at:330: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./calc.at:1334: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 stderr:
 stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
+stdout:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
+stdout:
 ./calc.at:1334: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 ./calc.at:1334: "$PERL" -ne '
   chomp;
@@ -16056,6 +16116,7 @@
   | (2^2)^3 = 64
 ./calc.at:1334:  $PREPARSER ./calc  input
 stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 Starting parse
 Entering state 0
 Stack now 0
@@ -20028,10 +20089,13 @@
   }eg
 ' expout || exit 77
 ./calc.at:1334: cat stderr
+stderr:
 input:
   | 1 + 2 * 3 + !+ ++
+stdout:
 ./calc.at:1334:  $PREPARSER ./calc  input
 stderr:
+./types.at:139:  $PREPARSER ./test
 Starting parse
 Entering state 0
 Stack now 0
@@ -20114,6 +20178,8 @@
 Cleanup: popping nterm exp (1.1: 7)
 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -20195,6 +20261,8 @@
 Cleanup: popping token '+' (1.1: )
 Cleanup: popping nterm exp (1.1: 7)
 ./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 input:
   | 1 + 2 * 3 + !- ++
 ./calc.at:1334:  $PREPARSER ./calc  input
@@ -20371,8 +20439,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 ./calc.at:1334: cat stderr
+stdout:
+./types.at:139: ./check
 input:
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
   | 1 + 2 * 3 + !* ++
 ./calc.at:1334:  $PREPARSER ./calc  input
 stderr:
@@ -20550,6 +20622,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./calc.at:1334: cat stderr
 input:
   | (#) + (#) = 2222
@@ -20678,7 +20751,6 @@
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -20800,9 +20872,6 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
 ./calc.at:1334: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -20813,14 +20882,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
 ./calc.at:1334: cat stderr
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 input:
   | (1 + #) = 1111
 ./calc.at:1334:  $PREPARSER ./calc  input
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -20920,6 +20987,9 @@
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./types.at:139:  $PREPARSER ./test
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -21019,6 +21089,9 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./calc.at:1334: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -21131,6 +21204,10 @@
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 Starting parse
 Entering state 0
 Stack now 0
@@ -21238,6 +21315,7 @@
 ' expout || exit 77
 ./calc.at:1334: cat stderr
 input:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
   | (1 + # + 1) = 1111
 ./calc.at:1334:  $PREPARSER ./calc  input
 stderr:
@@ -21471,10 +21549,6 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./calc.at:1334: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -21784,22 +21858,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./calc.at:1334: cat stderr
 470. calc.at:1334:  ok
+stderr:
 
+stdout:
+./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
+./scanner.at:330:  $PREPARSER ./input
+stderr:
+./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+469. scanner.at:330:  ok
 471. calc.at:1336: testing Calculator %header  ...
 ./calc.at:1336: mv calc.y.tmp calc.y
 
+
 ./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+472. calc.at:1337: testing Calculator %debug %locations  ...
+./calc.at:1337: mv calc.y.tmp calc.y
+
+./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 ./calc.at:1336: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./calc.at:1337: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 stderr:
 stdout:
 ./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
@@ -21853,386 +21932,9 @@
 ' expout || exit 77
 stderr:
 stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./calc.at:1336: cat stderr
-input:
-  | 1//2
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | error
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-./calc.at:1336:  $PREPARSER ./calc  /dev/null
-stderr:
-syntax error
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-memory exhausted
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-memory exhausted
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1336: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-stdout:
-./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc
-./calc.at:1336: cat stderr
-stderr:
-./scanner.at:330:  $PREPARSER ./input
-input:
-stdout:
-stderr:
-  | (1 + 1) / (1 - 1)
-./types.at:139:  $PREPARSER ./test
-./calc.at:1336:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-error: null divisor
-469. scanner.at:330:  ok
-./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-error: null divisor
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-
-./calc.at:1336: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 ./calc.at:1336: cat stderr
-471. calc.at:1336:  ok
-472. calc.at:1337: testing Calculator %debug %locations  ...
-./calc.at:1337: mv calc.y.tmp calc.y
-
-./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-
-473. calc.at:1338: testing Calculator %locations api.location.type={Span}  ...
-./calc.at:1338: mv calc.y.tmp calc.y
-
-./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1337: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./calc.at:1338: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-stdout:
-./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1338: "$PERL" -ne '
+./calc.at:1337: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -22244,146 +21946,9 @@
         )' calc.c
 
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 2
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1338: cat stderr
 input:
   | 1//2
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error
-stderr:
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1338: cat stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-input:
-  | error
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
-1.1: syntax error
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.1: syntax error
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1338: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
-1.7: syntax error
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.7: syntax error
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1338: cat stderr
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-input:
-  | 
-  | +1
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
-2.1: syntax error
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-stdout:
-2.1: syntax error
-./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./calc.at:1337: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-input:
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1336:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -22399,6 +21964,9 @@
   | (2^2)^3 = 64
 ./calc.at:1337:  $PREPARSER ./calc  input
 stderr:
+stderr:
+syntax error
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -23418,6 +22986,8 @@
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+syntax error
 Starting parse
 Entering state 0
 Stack now 0
@@ -24436,12 +24006,19 @@
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1338: cat stderr
 input:
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 1 2
 ./calc.at:1337:  $PREPARSER ./calc  input
-./calc.at:1338:  $PREPARSER ./calc  /dev/null
-stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -24464,10 +24041,8 @@
 Cleanup: discarding lookahead token "number" (1.3: 2)
 Stack now 0
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.1: syntax error
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
+./calc.at:1336: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -24488,7 +24063,9 @@
 Stack now 0
 Cleanup: discarding lookahead token "number" (1.3: 2)
 Stack now 0
-1.1: syntax error
+input:
+  | error
+./calc.at:1336:  $PREPARSER ./calc  input
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -24499,45 +24076,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1338: cat stderr
-./calc.at:1337: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1338:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
+./calc.at:1337: cat stderr
+syntax error
 input:
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
   | 1//2
 ./calc.at:1337:  $PREPARSER ./calc  input
 stderr:
@@ -24569,7 +24114,16 @@
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1338: cat stderr
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -24598,9 +24152,6 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1338:  $PREPARSER ./calc  input
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -24611,32 +24162,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1336: cat stderr
 ./calc.at:1337: cat stderr
-./calc.at:1338: cat stderr
 input:
+  | 1 = 2 = 3
+./calc.at:1336:  $PREPARSER ./calc  input
+input:
+stderr:
   | error
 ./calc.at:1337:  $PREPARSER ./calc  input
-input:
+syntax error
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-  | (- *) + (1 2) = 1
-./calc.at:1338:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -24647,11 +24184,8 @@
 Stack now 0
 stderr:
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
 stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -24660,11 +24194,7 @@
 1.1: syntax error
 Cleanup: discarding lookahead token "invalid token" (1.1: )
 Stack now 0
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1337: "$PERL" -pi -e 'use strict;
+./calc.at:1336: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -24674,7 +24204,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1338: "$PERL" -pi -e 'use strict;
+./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -24684,10 +24214,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1336: cat stderr
 ./calc.at:1337: cat stderr
 input:
+input:
+stderr:
+stdout:
+  | 
+  | +1
+./calc.at:1336:  $PREPARSER ./calc  input
+stderr:
   | 1 = 2 = 3
+syntax error
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1337:  $PREPARSER ./calc  input
+./types.at:139:  $PREPARSER ./test
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -24729,8 +24271,9 @@
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1338: cat stderr
 stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
 Starting parse
 Entering state 0
 Stack now 0
@@ -24770,8 +24313,8 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-input:
-  | (* *) + (*) + (*)
+stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -24782,18 +24325,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
+syntax error
 ./calc.at:1337: cat stderr
-./calc.at:1338: "$PERL" -pi -e 'use strict;
+./calc.at:1336: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -24804,6 +24338,7 @@
   }eg
 ' expout || exit 77
 input:
+./calc.at:1336: cat stderr
   | 
   | +1
 ./calc.at:1337:  $PREPARSER ./calc  input
@@ -24834,7 +24369,8 @@
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1338: cat stderr
+./calc.at:1336:  $PREPARSER ./calc  /dev/null
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -24861,10 +24397,12 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1338:  $PREPARSER ./calc  input
+syntax error
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+stdout:
+syntax error
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -24875,17 +24413,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
 ./calc.at:1337: cat stderr
-input:
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1337:  $PREPARSER ./calc  /dev/null
-  | 1 + 2 * 3 + !- ++
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
 stderr:
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -24895,17 +24437,7 @@
 Cleanup: discarding lookahead token "end of input" (1.1: )
 Stack now 0
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1336: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -24915,6 +24447,9 @@
 1.1: syntax error
 Cleanup: discarding lookahead token "end of input" (1.1: )
 Stack now 0
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1336:  $PREPARSER ./calc  input
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -24925,18 +24460,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
 ./calc.at:1337: cat stderr
-./calc.at:1338: cat stderr
-input:
 input:
-  | 1 + 2 * 3 + !* ++
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1338:  $PREPARSER ./calc  input
 ./calc.at:1337:  $PREPARSER ./calc  input
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-1.14: memory exhausted
-stderr:
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -25254,9 +24806,7 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.14: memory exhausted
 stderr:
 Starting parse
 Entering state 0
@@ -25575,16 +25125,8 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1336: cat stderr
+input:
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -25595,19 +25137,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1338: cat stderr
+  | (!!) + (1 2) = 1
+./calc.at:1336:  $PREPARSER ./calc  input
+stderr:
+syntax error
+error: 2222 != 1
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1337: cat stderr
+syntax error
+error: 2222 != 1
 input:
-input:
-  | (#) + (#) = 2222
   | (!!) + (1 2) = 1
 ./calc.at:1337:  $PREPARSER ./calc  input
-./calc.at:1338:  $PREPARSER ./calc  input
 stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -25749,9 +25292,16 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -25893,16 +25443,7 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1336: cat stderr
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -25913,19 +25454,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1337: cat stderr
-./calc.at:1338: cat stderr
 input:
-input:
-  | (1 + #) = 1111
-./calc.at:1338:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1336:  $PREPARSER ./calc  input
+./calc.at:1337: cat stderr
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+error: 2222 != 1
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error
+syntax error
+error: 2222 != 1
+input:
   | (- *) + (1 2) = 1
 ./calc.at:1337:  $PREPARSER ./calc  input
 stderr:
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -26075,7 +25620,6 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.6: syntax error: invalid character: '#'
 stderr:
 Starting parse
 Entering state 0
@@ -26225,7 +25769,7 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1338: "$PERL" -pi -e 'use strict;
+./calc.at:1336: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -26245,15 +25789,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1338: cat stderr
+./calc.at:1336: cat stderr
 ./calc.at:1337: cat stderr
 input:
+  | (* *) + (*) + (*)
+./calc.at:1336:  $PREPARSER ./calc  input
 input:
-  | (# + 1) = 1111
+stderr:
   | (* *) + (*) + (*)
-./calc.at:1338:  $PREPARSER ./calc  input
 ./calc.at:1337:  $PREPARSER ./calc  input
-stderr:
+syntax error
+syntax error
+syntax error
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -26407,21 +25955,11 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
+syntax error
+syntax error
+syntax error
 stderr:
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -26573,8 +26111,7 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1338: cat stderr
-./calc.at:1337: "$PERL" -pi -e 'use strict;
+./calc.at:1336: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -26584,16 +26121,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1337: cat stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1338: "$PERL" -pi -e 'use strict;
+./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -26603,10 +26131,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1336: cat stderr
+input:
+./calc.at:1337: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1336:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1337:  $PREPARSER ./calc  input
-./calc.at:1338: cat stderr
+stderr:
+./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -26690,10 +26226,6 @@
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1338:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -26774,25 +26306,17 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+input:
 ./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.11-17: error: null divisor
-./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1336:  $PREPARSER ./calc  input
 stderr:
-1.11-17: error: null divisor
 input:
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 + !- ++
 ./calc.at:1337:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1338: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -26955,7 +26479,16 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1338: cat stderr
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -26966,12 +26499,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-473. calc.at:1338: ./calc.at:1337: cat stderr
- ok
+./calc.at:1336: cat stderr
+./calc.at:1337: cat stderr
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1336:  $PREPARSER ./calc  input
+stderr:
+memory exhausted
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | 1 + 2 * 3 + !* ++
 ./calc.at:1337:  $PREPARSER ./calc  input
 stderr:
+memory exhausted
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -27136,7 +26677,16 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -27147,11 +26697,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1336: cat stderr
 ./calc.at:1337: cat stderr
 input:
+input:
+  | (#) + (#) = 2222
+./calc.at:1336:  $PREPARSER ./calc  input
   | (#) + (#) = 2222
 ./calc.at:1337:  $PREPARSER ./calc  input
 stderr:
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -27274,10 +26832,10 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-474. calc.at:1340: testing Calculator %name-prefix "calc"  ...
 stderr:
-./calc.at:1340: mv calc.y.tmp calc.y
-
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -27399,7 +26957,6 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -27410,9 +26967,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1337: cat stderr
 input:
-./calc.at:1340: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+./calc.at:1336: cat stderr
   | (1 + #) = 1111
 ./calc.at:1337:  $PREPARSER ./calc  input
 stderr:
@@ -27515,7 +27082,10 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | (1 + #) = 1111
+./calc.at:1336:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -27614,6 +27184,9 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -27624,10 +27197,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+syntax error: invalid character: '#'
 ./calc.at:1337: cat stderr
 input:
   | (# + 1) = 1111
 ./calc.at:1337:  $PREPARSER ./calc  input
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -27821,6 +27406,7 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1336: cat stderr
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -27831,8 +27417,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (# + 1) = 1111
+./calc.at:1336:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1337: cat stderr
+syntax error: invalid character: '#'
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
+syntax error: invalid character: '#'
   | (1 + # + 1) = 1111
 ./calc.at:1337:  $PREPARSER ./calc  input
 stderr:
@@ -28066,6 +27660,16 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1337: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28076,10 +27680,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1336: cat stderr
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1336:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1337: cat stderr
+syntax error: invalid character: '#'
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
   | (1 + 1) / (1 - 1)
 ./calc.at:1337:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
 stderr:
 Starting parse
 Entering state 0
@@ -28224,6 +27837,16 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -28375,14 +27998,53 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1336: cat stderr
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1336:  $PREPARSER ./calc  input
 ./calc.at:1337: cat stderr
+stderr:
+error: null divisor
+./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+error: null divisor
 472. calc.at:1337:  ok
+./calc.at:1336: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 
-475. calc.at:1341: testing Calculator %verbose  ...
-./calc.at:1341: mv calc.y.tmp calc.y
+./calc.at:1336: cat stderr
+471. calc.at:1336:  ok
 
-./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1341: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+473. calc.at:1338: testing Calculator %locations api.location.type={Span}  ...
+./calc.at:1338: mv calc.y.tmp calc.y
+
+./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+474. calc.at:1340: testing Calculator %name-prefix "calc"  ...
+stdout:
+./types.at:139:  $PREPARSER ./test
+./calc.at:1340: mv calc.y.tmp calc.y
+
+stderr:
+./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./calc.at:1338: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./calc.at:1340: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+stderr:
+stdout:
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
@@ -28434,15 +28096,57 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+stdout:
+./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 ./calc.at:1340: cat stderr
+./calc.at:1338: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
+input:
 input:
   | 1//2
 ./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
 syntax error
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 stderr:
 syntax error
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+======== Testing with C++ standard flags: ''
+input:
+  | 1 2
+./calc.at:1338:  $PREPARSER ./calc  input
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28453,16 +28157,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: cat stderr
-input:
-  | error
-./calc.at:1340:  $PREPARSER ./calc  input
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
-syntax error
-./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-./calc.at:1340: "$PERL" -pi -e 'use strict;
+1.3: syntax error
+./calc.at:1340: cat stderr
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28472,15 +28174,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: cat stderr
 input:
-  | 1 = 2 = 3
+  | error
 ./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
 syntax error
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: cat stderr
 stderr:
+input:
 syntax error
+  | 1//2
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+1.3: syntax error
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.3: syntax error
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28491,17 +28201,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1340:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1340: "$PERL" -pi -e 'use strict;
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28511,43 +28211,117 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+stdout:
+./calc.at:1338: cat stderr
+./types.at:139: ./check
 ./calc.at:1340: cat stderr
-./calc.at:1340:  $PREPARSER ./calc  /dev/null
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+input:
+input:
+  | 1 = 2 = 3
+./calc.at:1340:  $PREPARSER ./calc  input
+  | error
+./calc.at:1338:  $PREPARSER ./calc  input
 stderr:
+1.1: syntax error
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
 stderr:
-./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+stderr:
+1.1: syntax error
 syntax error
-./calc.at:1341: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
+./calc.at:1338: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1340: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./calc.at:1338: cat stderr
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
+  | 1 = 2 = 3
+./calc.at:1338:  $PREPARSER ./calc  input
+./calc.at:1340: cat stderr
+stderr:
+1.7: syntax error
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+1.7: syntax error
   | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
+  | +1
+./calc.at:1340:  $PREPARSER ./calc  input
+stderr:
+syntax error
+./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+syntax error
+./calc.at:1338: cat stderr
+input:
+./calc.at:1340: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1341:  $PREPARSER ./calc  input
+  | +1
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+2.1: syntax error
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1340: cat stderr
+stderr:
+2.1: syntax error
+./calc.at:1340:  $PREPARSER ./calc  /dev/null
 stderr:
+syntax error
+./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+syntax error
+./calc.at:1338: cat stderr
+./calc.at:1338:  $PREPARSER ./calc  /dev/null
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28558,16 +28332,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.1: syntax error
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.1: syntax error
 ./calc.at:1340: cat stderr
+./calc.at:1338: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
-input:
-  | 1 2
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1340:  $PREPARSER ./calc  input
-./calc.at:1341:  $PREPARSER ./calc  input
 stderr:
 syntax error
 syntax error
@@ -28576,16 +28359,15 @@
 error: 4444 != 1
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-syntax error
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: cat stderr
 syntax error
 syntax error
 syntax error
 syntax error
 error: 4444 != 1
-stderr:
-syntax error
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1338:  $PREPARSER ./calc  input
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28596,7 +28378,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1340: cat stderr
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28606,26 +28402,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: cat stderr
 input:
-./calc.at:1341: cat stderr
   | (!!) + (1 2) = 1
 ./calc.at:1340:  $PREPARSER ./calc  input
+./calc.at:1338: cat stderr
 stderr:
 syntax error
 error: 2222 != 1
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1//2
-./calc.at:1341:  $PREPARSER ./calc  input
 stderr:
-stderr:
-syntax error
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 syntax error
 error: 2222 != 1
+  | (!!) + (1 2) = 1
+./calc.at:1338:  $PREPARSER ./calc  input
 stderr:
-syntax error
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28636,7 +28427,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1340: cat stderr
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28646,7 +28444,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: cat stderr
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1340:  $PREPARSER ./calc  input
@@ -28655,16 +28452,20 @@
 syntax error
 error: 2222 != 1
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1341: cat stderr
+./calc.at:1338: cat stderr
 stderr:
 syntax error
 syntax error
 error: 2222 != 1
 input:
-  | error
-./calc.at:1341:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28675,10 +28476,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28689,28 +28490,32 @@
   }eg
 ' expout || exit 77
 ./calc.at:1340: cat stderr
-./calc.at:1341: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1341:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1338: cat stderr
 input:
-syntax error
   | (* *) + (*) + (*)
 ./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
+input:
 syntax error
 syntax error
 syntax error
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1338:  $PREPARSER ./calc  input
 stderr:
+stderr:
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
 syntax error
 syntax error
-./calc.at:1340: "$PERL" -pi -e 'use strict;
+stderr:
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28720,7 +28525,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28730,37 +28535,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./calc.at:1341: cat stderr
+./calc.at:1338: cat stderr
 ./calc.at:1340: cat stderr
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1338:  $PREPARSER ./calc  input
 input:
-  | 
-  | +1
-./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1340:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
 ./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-syntax error
 input:
   | 1 + 2 * 3 + !- ++
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+input:
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
+stderr:
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28770,17 +28573,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./calc.at:1341: cat stderr
-./calc.at:1341:  $PREPARSER ./calc  /dev/null
-stderr:
-stderr:
-syntax error
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
-syntax error
-./types.at:139: ./check
+./calc.at:1338: cat stderr
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28791,8 +28584,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1340: cat stderr
+1.14: memory exhausted
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.14: memory exhausted
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1340:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28802,33 +28607,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: cat stderr
-./calc.at:1341: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1340:  $PREPARSER ./calc  input
-stderr:
 memory exhausted
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
 stderr:
 memory exhausted
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1341:  $PREPARSER ./calc  input
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-stderr:
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
+./calc.at:1338: cat stderr
+input:
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28839,7 +28623,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+  | (#) + (#) = 2222
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1340: cat stderr
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28849,8 +28643,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: cat stderr
-./calc.at:1341: cat stderr
 input:
   | (#) + (#) = 2222
 ./calc.at:1340:  $PREPARSER ./calc  input
@@ -28858,19 +28650,11 @@
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
 stderr:
-  | (!!) + (1 2) = 1
-./calc.at:1341:  $PREPARSER ./calc  input
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-error: 2222 != 1
+./calc.at:1338: cat stderr
+input:
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28881,7 +28665,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+  | (1 + #) = 1111
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1340: cat stderr
+1.6: syntax error: invalid character: '#'
+input:
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28891,24 +28684,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: cat stderr
-input:
   | (1 + #) = 1111
 ./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1341: cat stderr
 syntax error: invalid character: '#'
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: cat stderr
 stderr:
-syntax error: invalid character: '#'
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1341:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
+  | (# + 1) = 1111
+./calc.at:1338:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28919,11 +28709,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28938,19 +28725,19 @@
   | (# + 1) = 1111
 ./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1338: cat stderr
 syntax error: invalid character: '#'
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1341: cat stderr
 stderr:
-syntax error: invalid character: '#'
 input:
-  | (* *) + (*) + (*)
-./calc.at:1341:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
+  | (1 + # + 1) = 1111
+./calc.at:1338:  $PREPARSER ./calc  input
 stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.6: syntax error: invalid character: '#'
 ./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -28961,12 +28748,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1340: cat stderr
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -28976,48 +28758,29 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: cat stderr
+./calc.at:1340: cat stderr
+./calc.at:1338: cat stderr
 input:
   | (1 + # + 1) = 1111
 ./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
+input:
 syntax error: invalid character: '#'
 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1338:  $PREPARSER ./calc  input
 stderr:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1341:  $PREPARSER ./calc  input
 syntax error: invalid character: '#'
 stderr:
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1340: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1340: cat stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1341:  $PREPARSER ./calc  input
-stderr:
-input:
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + 1) / (1 - 1)
-./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
+1.11-17: error: null divisor
+./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./types.at:139:  $PREPARSER ./test
 stderr:
-error: null divisor
-./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-error: null divisor
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+1.11-17: error: null divisor
+./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -29027,7 +28790,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1340: "$PERL" -pi -e 'use strict;
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1338: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -29037,19 +28801,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+======== Testing with C++ standard flags: ''
 ./calc.at:1340: cat stderr
-./calc.at:1341: cat stderr
-474. calc.at:1340:  ok
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1341:  $PREPARSER ./calc  input
+./calc.at:1338: cat stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1340:  $PREPARSER ./calc  input
 stderr:
-memory exhausted
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: null divisor
+./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+473. calc.at:1338:  ok
 stderr:
-memory exhausted
+error: null divisor
 
-./calc.at:1341: "$PERL" -pi -e 'use strict;
+./calc.at:1340: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -29059,18 +28825,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1341: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1341:  $PREPARSER ./calc  input
+./calc.at:1340: cat stderr
+474. calc.at:1340:  ok
+475. calc.at:1341: testing Calculator %verbose  ...
+./calc.at:1341: mv calc.y.tmp calc.y
+
+./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+
+./types.at:139: ./check
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./calc.at:1341: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 476. calc.at:1342: testing Calculator %yacc  ...
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
 ./calc.at:1342:
     if "$POSIXLY_CORRECT_IS_EXPORTED"; then
       sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
@@ -29080,100 +28847,8 @@
 
 
 ./calc.at:1342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1341: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1341: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1341:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./calc.at:1342: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-syntax error: invalid character: '#'
-./calc.at:1341: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1341: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1341:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1341: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1341: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1341:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1341: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1341: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1341:  $PREPARSER ./calc  input
-stderr:
-error: null divisor
-./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-error: null divisor
-./calc.at:1341: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1341: cat stderr
-475. calc.at:1341:  ok
-
-477. calc.at:1343: testing Calculator parse.error=detailed  ...
-./calc.at:1343: mv calc.y.tmp calc.y
-
-./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
 stdout:
 ./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
@@ -29211,11 +28886,43 @@
   | 1 2
 ./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
+stderr:
+stdout:
+./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 syntax error
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1341: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
 stderr:
 syntax error
-./calc.at:1343: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29226,8 +28933,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | 1 2
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1342: cat stderr
+syntax error
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
+syntax error
   | 1//2
 ./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
@@ -29235,6 +28950,16 @@
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29245,15 +28970,41 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1341: cat stderr
+stderr:
+stdout:
+./types.at:139:  $PREPARSER ./test
+input:
+  | 1//2
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+stderr:
+syntax error
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1342: cat stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
+syntax error
+447. types.at:139:  ok
   | error
 ./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
 syntax error
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+
 syntax error
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29264,15 +29015,39 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1341: cat stderr
+input:
 ./calc.at:1342: cat stderr
+  | error
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+477. calc.at:1343: testing Calculator parse.error=detailed  ...
+./calc.at:1343: mv calc.y.tmp calc.y
+
 input:
+syntax error
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
   | 1 = 2 = 3
 ./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
+stderr:
+syntax error
 syntax error
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29283,16 +29058,32 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1341: cat stderr
+stdout:
 ./calc.at:1342: cat stderr
+./calc.at:1343: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+./types.at:139:  $PREPARSER ./test
+stderr:
 input:
+input:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1341:  $PREPARSER ./calc  input
   | 
   | +1
+stderr:
 ./calc.at:1342:  $PREPARSER ./calc  input
+======== Testing with C++ standard flags: ''
+syntax error
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
 stderr:
 syntax error
+syntax error
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29303,11 +29094,31 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1342: cat stderr
+./calc.at:1341: cat stderr
 ./calc.at:1342:  $PREPARSER ./calc  /dev/null
 stderr:
+input:
 syntax error
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 
+  | +1
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+stderr:
+syntax error
+syntax error
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
@@ -29321,6 +29132,16 @@
   }eg
 ' expout || exit 77
 ./calc.at:1342: cat stderr
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1342:  $PREPARSER ./calc  input
@@ -29331,12 +29152,22 @@
 syntax error
 error: 4444 != 1
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1341: cat stderr
+stderr:
 stderr:
 syntax error
 syntax error
 syntax error
 syntax error
 error: 4444 != 1
+stdout:
+./calc.at:1341:  $PREPARSER ./calc  /dev/null
+./types.at:139: ./check
+stderr:
+syntax error
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+stderr:
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29347,17 +29178,46 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error
 ./calc.at:1342: cat stderr
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | (!!) + (1 2) = 1
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1341: cat stderr
 syntax error
 error: 2222 != 1
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
 error: 2222 != 1
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29368,19 +29228,38 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1342: cat stderr
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1342:  $PREPARSER ./calc  input
+./calc.at:1341: cat stderr
 stderr:
 syntax error
 syntax error
 error: 2222 != 1
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1341:  $PREPARSER ./calc  input
 syntax error
 syntax error
 error: 2222 != 1
+stderr:
+syntax error
+error: 2222 != 1
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29391,20 +29270,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1342: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1342:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
 syntax error
-syntax error
-./calc.at:1342: "$PERL" -pi -e 'use strict;
+error: 2222 != 1
+./calc.at:1341: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -29416,17 +29284,26 @@
 ' expout || exit 77
 ./calc.at:1342: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
+./calc.at:1341: cat stderr
+  | (* *) + (*) + (*)
 ./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
+syntax error
+syntax error
+syntax error
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+syntax error
+syntax error
+syntax error
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1342:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1341:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+error: 2222 != 1
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -29438,16 +29315,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error
+syntax error
+error: 2222 != 1
 ./calc.at:1342: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1342:  $PREPARSER ./calc  input
-stderr:
-memory exhausted
-./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-memory exhausted
-./calc.at:1342: "$PERL" -pi -e 'use strict;
+./calc.at:1341: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -29457,37 +29329,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1342: cat stderr
 input:
-  | (#) + (#) = 2222
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
+./calc.at:1341: cat stderr
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1342: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1342: cat stderr
+./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-  | (1 + #) = 1111
+  | (* *) + (*) + (*)
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+syntax error
+syntax error
+syntax error
+input:
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+stderr:
 ./calc.at:1342:  $PREPARSER ./calc  input
+syntax error
+syntax error
+syntax error
 stderr:
-syntax error: invalid character: '#'
 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1342: "$PERL" -pi -e 'use strict;
+./calc.at:1341: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -29497,18 +29365,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1342: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1342:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
 stderr:
 stdout:
 ./calc.at:1343: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1341: cat stderr
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29530,9 +29390,10 @@
         || /\t/
         )' calc.c
 
-./calc.at:1342: cat stderr
 input:
 input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1341:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -29546,24 +29407,37 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-  | (1 + # + 1) = 1111
 ./calc.at:1343:  $PREPARSER ./calc  input
-./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
 stderr:
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1342: cat stderr
 stderr:
 ./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-syntax error: invalid character: '#'
-./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
+stderr:
+./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+memory exhausted
+./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
   | 1 2
 ./calc.at:1343:  $PREPARSER ./calc  input
 stderr:
+memory exhausted
 syntax error, unexpected number
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+stderr:
+syntax error, unexpected number
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29575,8 +29449,6 @@
   }eg
 ' expout || exit 77
 stderr:
-syntax error, unexpected number
-./calc.at:1342: cat stderr
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29587,21 +29459,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1342: cat stderr
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1343: cat stderr
 input:
-  | (1 + 1) / (1 - 1)
+  | (#) + (#) = 2222
 ./calc.at:1342:  $PREPARSER ./calc  input
 stderr:
-error: null divisor
-./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1343: cat stderr
-stderr:
-error: null divisor
 input:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1//2
 ./calc.at:1343:  $PREPARSER ./calc  input
+./calc.at:1341: cat stderr
+stderr:
 stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1341:  $PREPARSER ./calc  input
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+stderr:
+memory exhausted
 ./calc.at:1342: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29612,10 +29505,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1342: cat stderr
-476. calc.at:1342:  ok
+memory exhausted
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29626,16 +29518,55 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1342: cat stderr
 ./calc.at:1343: cat stderr
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (1 + #) = 1111
+./calc.at:1342:  $PREPARSER ./calc  input
 input:
+stderr:
   | error
 ./calc.at:1343:  $PREPARSER ./calc  input
+./calc.at:1341: cat stderr
 stderr:
-
+syntax error: invalid character: '#'
+./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error, unexpected invalid token
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+syntax error: invalid character: '#'
+input:
+stderr:
+  | (#) + (#) = 2222
+./calc.at:1341:  $PREPARSER ./calc  input
 syntax error, unexpected invalid token
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1342: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29647,18 +29578,50 @@
   }eg
 ' expout || exit 77
 ./calc.at:1343: cat stderr
+./calc.at:1342: cat stderr
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
 input:
   | 1 = 2 = 3
 ./calc.at:1343:  $PREPARSER ./calc  input
+  | (# + 1) = 1111
+./calc.at:1342:  $PREPARSER ./calc  input
+stderr:
 stderr:
+syntax error: invalid character: '#'
+./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error, unexpected '='
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-478. calc.at:1344: testing Calculator parse.error=verbose  ...
-./calc.at:1344: mv calc.y.tmp calc.y
-
+./calc.at:1341: cat stderr
+stderr:
 stderr:
 syntax error, unexpected '='
-./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+syntax error: invalid character: '#'
+input:
+  | (1 + #) = 1111
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1342: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29669,7 +29632,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 ./calc.at:1343: cat stderr
+syntax error: invalid character: '#'
+./calc.at:1342: cat stderr
 input:
   | 
   | +1
@@ -29679,7 +29645,22 @@
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error, unexpected '+'
-./calc.at:1344: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1342:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error: invalid character: '#'
+./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29690,13 +29671,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 ./calc.at:1343: cat stderr
+./calc.at:1341: cat stderr
+syntax error: invalid character: '#'
 ./calc.at:1343:  $PREPARSER ./calc  /dev/null
 stderr:
 syntax error, unexpected end of file
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
 syntax error, unexpected end of file
+  | (# + 1) = 1111
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1342: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29707,10 +29708,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error: invalid character: '#'
 ./calc.at:1343: cat stderr
+./calc.at:1342: cat stderr
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1342:  $PREPARSER ./calc  input
+stderr:
+error: null divisor
+./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+error: null divisor
 input:
+stderr:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stdout:
 ./calc.at:1343:  $PREPARSER ./calc  input
+./types.at:139:  $PREPARSER ./test
 stderr:
 syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 syntax error, unexpected ')', expecting number or '-' or '(' or '!'
@@ -29718,12 +29742,29 @@
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 error: 4444 != 1
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1342: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 stderr:
 syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 error: 4444 != 1
+./calc.at:1341: cat stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+448. types.at:139:  ok
+  | (1 + # + 1) = 1111
+./calc.at:1341:  $PREPARSER ./calc  input
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29734,7 +29775,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1342: cat stderr
+stderr:
+syntax error: invalid character: '#'
 ./calc.at:1343: cat stderr
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+stderr:
+syntax error: invalid character: '#'
+476. calc.at:1342:  ok
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1343:  $PREPARSER ./calc  input
@@ -29743,6 +29792,17 @@
 error: 2222 != 1
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 syntax error, unexpected number
 error: 2222 != 1
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
@@ -29755,20 +29815,48 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+478. calc.at:1344: testing Calculator parse.error=verbose  ...
+./calc.at:1344: mv calc.y.tmp calc.y
+
+./calc.at:1341: cat stderr
+./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+input:
+  | (1 + 1) / (1 - 1)
 ./calc.at:1343: cat stderr
+./calc.at:1341:  $PREPARSER ./calc  input
+stderr:
+error: null divisor
+./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1343:  $PREPARSER ./calc  input
 stderr:
+stderr:
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 syntax error, unexpected number
 error: 2222 != 1
+error: null divisor
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 syntax error, unexpected number
 error: 2222 != 1
-./calc.at:1343: "$PERL" -pi -e 'use strict;
+./calc.at:1341: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+479. calc.at:1346: testing Calculator api.pure=full %locations  ...
+./calc.at:1346: mv calc.y.tmp calc.y
+
+./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1341: cat stderr
+475. calc.at:1341: ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -29778,7 +29866,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+ ok
+./calc.at:1344: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 ./calc.at:1343: cat stderr
+
 input:
   | (* *) + (*) + (*)
 ./calc.at:1343:  $PREPARSER ./calc  input
@@ -29791,6 +29882,7 @@
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1346: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29801,6 +29893,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations  ...
+./calc.at:1347: mv calc.y.tmp calc.y
+
+./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 ./calc.at:1343: cat stderr
 input:
   | 1 + 2 * 3 + !+ ++
@@ -29815,6 +29911,7 @@
 stderr:
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1347: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29911,7 +30008,21 @@
 syntax error: invalid character: '#'
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+stdout:
 syntax error: invalid character: '#'
+./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1347: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29922,15 +30033,56 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
 ./calc.at:1343: cat stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1347:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
   | (1 + 1) / (1 - 1)
 ./calc.at:1343:  $PREPARSER ./calc  input
+./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
 error: null divisor
 ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 2
+./calc.at:1347:  $PREPARSER ./calc  input
+stderr:
 stderr:
 error: null divisor
+1.3: syntax error
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.3: syntax error
+stderr:
+stdout:
+./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1344: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
 ./calc.at:1343: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -29941,11 +30093,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1343: cat stderr
 stderr:
+./calc.at:1347: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1343: cat stderr
+input:
 stdout:
-./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-477. calc.at:1343: ./calc.at:1344: "$PERL" -ne '
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1344:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+stderr:
+477. calc.at:1343:  ok
+./calc.at:1347: cat stderr
+./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1346: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -29956,8 +30139,13 @@
         || /\t/
         )' calc.c
 
- ok
 input:
+  | 1//2
+./calc.at:1347:  $PREPARSER ./calc  input
+stderr:
+input:
+1.3: syntax error
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -29971,25 +30159,21 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1344:  $PREPARSER ./calc  input
+./calc.at:1346:  $PREPARSER ./calc  input
+input:
 stderr:
-
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
   | 1 2
 ./calc.at:1344:  $PREPARSER ./calc  input
 stderr:
-479. calc.at:1346: testing Calculator api.pure=full %locations  ...
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
 syntax error, unexpected number
 ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1346: mv calc.y.tmp calc.y
-
-./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
-syntax error, unexpected number
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -29999,15 +30183,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
+stderr:
 input:
-  | 1//2
-./calc.at:1344:  $PREPARSER ./calc  input
+syntax error, unexpected number
+  | 1 2
+./calc.at:1346:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1347: cat stderr
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30018,17 +30203,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.3: syntax error
 ./calc.at:1344: cat stderr
 input:
   | error
-./calc.at:1346: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-./calc.at:1344:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected invalid token
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected invalid token
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+./calc.at:1347:  $PREPARSER ./calc  input
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30038,16 +30218,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
+481. calc.at:1348: testing Calculator parse.error=detailed %locations  ...
+stderr:
 input:
-  | 1 = 2 = 3
+./calc.at:1348: mv calc.y.tmp calc.y
+
+1.1: syntax error
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
+stderr:
 ./calc.at:1344:  $PREPARSER ./calc  input
+./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1346: cat stderr
+1.1: syntax error
 stderr:
-syntax error, unexpected '='
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1//2
+./calc.at:1346:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected '='
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30057,16 +30248,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1344:  $PREPARSER ./calc  input
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 stderr:
-syntax error, unexpected '+'
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '+'
+1.3: syntax error
+./calc.at:1347: cat stderr
 ./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30077,21 +30265,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
-./calc.at:1344:  $PREPARSER ./calc  /dev/null
-stderr:
-stderr:
-stdout:
-syntax error, unexpected end of input
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139:  $PREPARSER ./test
-stderr:
-stderr:
-syntax error, unexpected end of input
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+./calc.at:1348: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30101,45 +30276,32 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | 1 = 2 = 3
+./calc.at:1347:  $PREPARSER ./calc  input
 ./calc.at:1344: cat stderr
+stderr:
+1.7: syntax error
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1346: cat stderr
+  | error
 ./calc.at:1344:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-./calc.at:1344: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1344: cat stderr
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1344:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected number
-error: 2222 != 1
+syntax error, unexpected invalid token
 ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1346:  $PREPARSER ./calc  input
+1.7: syntax error
 stderr:
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+1.1: syntax error
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error, unexpected invalid token
+stderr:
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30149,20 +30311,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1344:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+1.1: syntax error
+./calc.at:1347: cat stderr
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30172,23 +30323,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1344:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./calc.at:1346: cat stderr
 ./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30199,62 +30334,31 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1344:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 
+  | +1
+./calc.at:1347:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1344:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1344: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+  | 1 = 2 = 3
+./calc.at:1346:  $PREPARSER ./calc  input
+2.1: syntax error
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1344: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1344:  $PREPARSER ./calc  input
 stderr:
-memory exhausted
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+1.7: syntax error
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-memory exhausted
-./calc.at:1344: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1344: cat stderr
+2.1: syntax error
 input:
-  | (#) + (#) = 2222
+  | 1 = 2 = 3
 ./calc.at:1344:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+1.7: syntax error
+syntax error, unexpected '='
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30264,16 +30368,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1344:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+./calc.at:1347: cat stderr
+syntax error, unexpected '='
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30283,15 +30381,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1344:  $PREPARSER ./calc  input
+./calc.at:1347:  $PREPARSER ./calc  /dev/null
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
+1.1: syntax error
+./calc.at:1346: cat stderr
 ./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30303,48 +30399,7 @@
   }eg
 ' expout || exit 77
 ./calc.at:1344: cat stderr
-stderr:
-input:
-stdout:
-  | (1 + # + 1) = 1111
-./calc.at:1344:  $PREPARSER ./calc  input
-./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1346: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-stderr:
-syntax error: invalid character: '#'
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1344: "$PERL" -pi -e 'use strict;
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30354,23 +30409,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1344: cat stderr
 input:
-  | 1 2
+  | 
+  | +1
 ./calc.at:1346:  $PREPARSER ./calc  input
 stderr:
 input:
-1.3: syntax error
+./calc.at:1347: cat stderr
+2.1: syntax error
 ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + 1) / (1 - 1)
+  | 
+  | +1
 ./calc.at:1344:  $PREPARSER ./calc  input
 stderr:
-stderr:
-error: null divisor
+syntax error, unexpected '+'
 ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.3: syntax error
+input:
 stderr:
-error: null divisor
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1347:  $PREPARSER ./calc  input
+stderr:
+stderr:
+syntax error, unexpected '+'
+2.1: syntax error
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30381,6 +30448,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
 ./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30391,19 +30464,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1346: cat stderr
 ./calc.at:1344: cat stderr
-input:
-478. calc.at:1344:  ok
-  | 1//2
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-
-1.3: syntax error
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+./calc.at:1344:  $PREPARSER ./calc  /dev/null
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30413,20 +30476,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+syntax error, unexpected end of input
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1346: cat stderr
-input:
-480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations  ...
-./calc.at:1347: mv calc.y.tmp calc.y
-
-./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-  | error
-./calc.at:1346:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1346:  $PREPARSER ./calc  /dev/null
+syntax error, unexpected end of input
+./calc.at:1347: cat stderr
 stderr:
 1.1: syntax error
 ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1347:  $PREPARSER ./calc  input
 1.1: syntax error
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30436,16 +30503,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1346: cat stderr
-./calc.at:1347: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-input:
-  | 1 = 2 = 3
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.7: syntax error
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.7: syntax error
 ./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30456,17 +30517,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1346: cat stderr
+./calc.at:1344: cat stderr
+1.11: syntax error
+1.1-16: error: 2222 != 1
 input:
-  | 
-  | +1
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-2.1: syntax error
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-2.1: syntax error
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30477,40 +30532,48 @@
   }eg
 ' expout || exit 77
 ./calc.at:1346: cat stderr
-./calc.at:1346:  $PREPARSER ./calc  /dev/null
-stderr:
-1.1: syntax error
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1344:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error
-./calc.at:1346: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1346: cat stderr
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+./calc.at:1347: cat stderr
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1346:  $PREPARSER ./calc  input
 stderr:
+stderr:
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+input:
 1.2: syntax error
 1.18: syntax error
 1.23: syntax error
 1.41: syntax error
 1.1-46: error: 4444 != 1
 ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (- *) + (1 2) = 1
+./calc.at:1347:  $PREPARSER ./calc  input
 stderr:
 1.2: syntax error
 1.18: syntax error
 1.23: syntax error
 1.41: syntax error
 1.1-46: error: 4444 != 1
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30520,17 +30583,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1346: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
 ./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30541,20 +30593,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1346: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 1.4: syntax error
 1.12: syntax error
 1.1-17: error: 2222 != 1
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+./calc.at:1344: cat stderr
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30566,67 +30609,30 @@
 ' expout || exit 77
 ./calc.at:1346: cat stderr
 input:
-  | (* *) + (*) + (*)
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1346: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1346: cat stderr
+  | (!!) + (1 2) = 1
 input:
-  | 1 + 2 * 3 + !+ ++
+./calc.at:1344:  $PREPARSER ./calc  input
+  | (!!) + (1 2) = 1
 ./calc.at:1346:  $PREPARSER ./calc  input
+./calc.at:1347: cat stderr
 stderr:
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-input:
-stderr:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1346:  $PREPARSER ./calc  input
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-======== Testing with C++ standard flags: ''
+syntax error, unexpected number
+error: 2222 != 1
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
-./calc.at:1346: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1346: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1346:  $PREPARSER ./calc  input
 stderr:
-1.14: memory exhausted
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+syntax error, unexpected number
+error: 2222 != 1
+1.11: syntax error
+1.1-16: error: 2222 != 1
+  | (* *) + (*) + (*)
+./calc.at:1347:  $PREPARSER ./calc  input
 stderr:
-1.14: memory exhausted
 ./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30637,22 +30643,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1346: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stdout:
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30662,17 +30658,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1344: cat stderr
 ./calc.at:1346: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-1.6: syntax error: invalid character: '#'
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30682,16 +30674,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1346: cat stderr
-stderr:
 stdout:
+./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 input:
-  | (# + 1) = 1111
-./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1347: "$PERL" -ne '
+  | (- *) + (1 2) = 1
+./calc.at:1344:  $PREPARSER ./calc  input
+./calc.at:1348: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -30702,10 +30690,20 @@
         || /\t/
         )' calc.c
 
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | (- *) + (1 2) = 1
+./calc.at:1346:  $PREPARSER ./calc  input
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
 input:
-1.2: syntax error: invalid character: '#'
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -30719,12 +30717,27 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
+./calc.at:1348:  $PREPARSER ./calc  input
+./calc.at:1347: cat stderr
+stderr:
+input:
+stderr:
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1347:  $PREPARSER ./calc  input
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30734,22 +30747,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-./calc.at:1346: cat stderr
   | 1 2
-./calc.at:1347:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-1.3: syntax error
-  | (1 + # + 1) = 1111
-./calc.at:1346:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+./calc.at:1348:  $PREPARSER ./calc  input
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30760,8 +30760,20 @@
   }eg
 ' expout || exit 77
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.3: syntax error, unexpected number
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1344: cat stderr
+stderr:
+1.3: syntax error, unexpected number
+input:
+input:
+  | (* *) + (*) + (*)
+./calc.at:1344:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !- ++
+./calc.at:1347:  $PREPARSER ./calc  input
+./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30771,24 +30783,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
 ./calc.at:1346: cat stderr
-input:
-  | 1//2
-./calc.at:1347:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (1 + 1) / (1 - 1)
+  | (* *) + (*) + (*)
 ./calc.at:1346:  $PREPARSER ./calc  input
-1.3: syntax error
 stderr:
-1.11-17: error: null divisor
+stderr:
+stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
 ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1348: cat stderr
 stderr:
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30798,8 +30819,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.11-17: error: null divisor
-./calc.at:1346: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30809,19 +30830,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-./calc.at:1346: cat stderr
-input:
-  | error
-./calc.at:1347:  $PREPARSER ./calc  input
-stderr:
-1.1: syntax error
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-479. calc.at:1346:  ok
+  | 1//2
+./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error
-
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30831,20 +30843,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1347: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1347:  $PREPARSER ./calc  input
-481. calc.at:1348: testing Calculator parse.error=detailed %locations  ...
-stderr:
-1.7: syntax error
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1348: mv calc.y.tmp calc.y
-
+./calc.at:1344: cat stderr
 stderr:
-./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-1.7: syntax error
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1346: cat stderr
+input:
+./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30854,16 +30861,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-input:
-  | 
-  | +1
+  | 1 + 2 * 3 + !* ++
 ./calc.at:1347:  $PREPARSER ./calc  input
+input:
 stderr:
-2.1: syntax error
+stderr:
+input:
+1.14: memory exhausted
 ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1348: cat stderr
+  | 1 + 2 * 3 + !+ ++
+stdout:
+./calc.at:1344:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1346:  $PREPARSER ./calc  input
 stderr:
-2.1: syntax error
+./types.at:139:  $PREPARSER ./test
+stderr:
+1.14: memory exhausted
+stderr:
+input:
+stderr:
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1348:  $PREPARSER ./calc  input
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30874,14 +30898,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-./calc.at:1347:  $PREPARSER ./calc  /dev/null
 stderr:
-1.1: syntax error
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.1: syntax error
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+1.1: syntax error, unexpected invalid token
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+input:
+./calc.at:1347: cat stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1346:  $PREPARSER ./calc  input
+stderr:
+1.1: syntax error, unexpected invalid token
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30891,24 +30925,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+  | (#) + (#) = 2222
 ./calc.at:1347:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
+input:
+stderr:
+./calc.at:1348: cat stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1344:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+input:
+  | 1 = 2 = 3
+./calc.at:1348:  $PREPARSER ./calc  input
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30918,18 +30951,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1348: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-./calc.at:1347: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1347:  $PREPARSER ./calc  input
 stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.7: syntax error, unexpected '='
 stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+stderr:
+stderr:
+1.7: syntax error, unexpected '='
 ./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -30940,20 +30970,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1347:  $PREPARSER ./calc  input
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30963,20 +30980,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1347:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+./calc.at:1346: cat stderr
+./calc.at:1348: cat stderr
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -30986,21 +30992,34 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
 ./calc.at:1347: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1347:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1348:  $PREPARSER ./calc  input
+./calc.at:1344: cat stderr
 stderr:
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !* ++
+./calc.at:1346:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.14: memory exhausted
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error, unexpected '+'
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+1.14: memory exhausted
+  | (1 + #) = 1111
 input:
-  | 1 + 2 * 3 + !- ++
 ./calc.at:1347:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !* ++
+2.1: syntax error, unexpected '+'
+./calc.at:1344:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+1.6: syntax error: invalid character: '#'
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31010,15 +31029,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1347:  $PREPARSER ./calc  input
-stderr:
-1.14: memory exhausted
 ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.14: memory exhausted
+stderr:
+stdout:
+stderr:
+1.6: syntax error: invalid character: '#'
+memory exhausted
+./types.at:139: ./check
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./calc.at:1348: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31029,18 +31059,55 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+memory exhausted
+./calc.at:1348: cat stderr
+./calc.at:1346: cat stderr
+./calc.at:1348:  $PREPARSER ./calc  /dev/null
 ./calc.at:1347: cat stderr
+./calc.at:1344: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1344: cat stderr
 input:
+stderr:
   | (#) + (#) = 2222
-./calc.at:1347:  $PREPARSER ./calc  input
+1.1: syntax error, unexpected end of file
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1346:  $PREPARSER ./calc  input
+input:
 stderr:
 1.2: syntax error: invalid character: '#'
 1.8: syntax error: invalid character: '#'
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | (# + 1) = 1111
+./calc.at:1347:  $PREPARSER ./calc  input
+stderr:
+  | (#) + (#) = 2222
+./calc.at:1344:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
 ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+1.1: syntax error, unexpected end of file
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.2: syntax error: invalid character: '#'
 1.8: syntax error: invalid character: '#'
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31050,16 +31117,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1347:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31069,15 +31128,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
+./calc.at:1348: cat stderr
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 input:
-  | (# + 1) = 1111
-./calc.at:1347:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
 ./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31088,16 +31142,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1347:  $PREPARSER ./calc  input
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1346: cat stderr
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1347: cat stderr
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31107,16 +31170,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
+  | (1 + #) = 1111
+./calc.at:1346:  $PREPARSER ./calc  input
 input:
-  | (1 + 1) / (1 - 1)
+  | (1 + # + 1) = 1111
 ./calc.at:1347:  $PREPARSER ./calc  input
 stderr:
-1.11-17: error: null divisor
-./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.11-17: error: null divisor
-./calc.at:1347: "$PERL" -pi -e 'use strict;
+1.6: syntax error: invalid character: '#'
+./calc.at:1344: cat stderr
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31126,80 +31192,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1347: cat stderr
-480. calc.at:1347:  ok
-
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-447. types.at:139:  ok
-482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc  ...
-./calc.at:1350:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
-
-./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc  ...
-./calc.at:1351:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
-stderr:
-stdout:
-./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1350: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./calc.at:1348: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.6: syntax error: invalid character: '#'
 input:
-  | 1 2
-./calc.at:1348:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error, unexpected number
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+  | (1 + #) = 1111
+./calc.at:1344:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error, unexpected number
-./calc.at:1351: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./calc.at:1348: "$PERL" -pi -e 'use strict;
+./calc.at:1348: cat stderr
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31209,16 +31211,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1348: cat stderr
-input:
-  | 1//2
-./calc.at:1348:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1348: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31228,16 +31223,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1348: cat stderr
 input:
-  | error
+stderr:
+  | (!!) + (1 2) = 1
 ./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error, unexpected invalid token
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+./calc.at:1347: cat stderr
 stderr:
-1.1: syntax error, unexpected invalid token
-./calc.at:1348: "$PERL" -pi -e 'use strict;
+./calc.at:1346: cat stderr
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31247,15 +31245,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1348: cat stderr
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
 input:
-  | 1 = 2 = 3
-./calc.at:1348:  $PREPARSER ./calc  input
-stderr:
-1.7: syntax error, unexpected '='
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (# + 1) = 1111
+./calc.at:1346:  $PREPARSER ./calc  input
+./calc.at:1344: cat stderr
 stderr:
-1.7: syntax error, unexpected '='
+  | (1 + 1) / (1 - 1)
+./calc.at:1347:  $PREPARSER ./calc  input
 ./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31266,17 +31265,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (# + 1) = 1111
 ./calc.at:1348: cat stderr
+./calc.at:1344:  $PREPARSER ./calc  input
+1.11-17: error: null divisor
 input:
-  | 
-  | +1
+./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+  | (- *) + (1 2) = 1
 ./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
-2.1: syntax error, unexpected '+'
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-2.1: syntax error, unexpected '+'
-./calc.at:1348: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+1.11-17: error: null divisor
+stderr:
+stderr:
+./calc.at:1347: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31286,14 +31301,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1348: cat stderr
-./calc.at:1348:  $PREPARSER ./calc  /dev/null
-stderr:
-1.1: syntax error, unexpected end of file
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.1: syntax error, unexpected end of file
-./calc.at:1348: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1347: cat stderr
+./calc.at:1346: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31303,24 +31316,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1348: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1348:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1348: "$PERL" -pi -e 'use strict;
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31330,17 +31326,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1348: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1348:  $PREPARSER ./calc  input
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
+480. calc.at:1347:  ok
+./calc.at:1344: cat stderr
+./calc.at:1346: cat stderr
 ./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31351,20 +31339,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1344:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1348: cat stderr
+syntax error: invalid character: '#'
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error: invalid character: '#'
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1346:  $PREPARSER ./calc  input
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+  | (* *) + (*) + (*)
+./calc.at:1348:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1348: "$PERL" -pi -e 'use strict;
+1.6: syntax error: invalid character: '#'
+./calc.at:1344: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -31374,19 +31370,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1348: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1346: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1344: cat stderr
 stderr:
+482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc  ...
+input:
 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1350:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
+  | (1 + 1) / (1 - 1)
+./calc.at:1344:  $PREPARSER ./calc  input
+./calc.at:1346: cat stderr
+stderr:
+error: null divisor
+./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31397,27 +31416,57 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+error: null divisor
+input:
+./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 ./calc.at:1348: cat stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1346:  $PREPARSER ./calc  input
+stderr:
+1.11-17: error: null divisor
+./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | 1 + 2 * 3 + !+ ++
+./calc.at:1344: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
+./calc.at:1344: cat stderr
 stderr:
+./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+478. calc.at:1344:  ok
+./calc.at:1346: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-stdout:
 ./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./types.at:139:  $PREPARSER ./test
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
 input:
-448. types.at:139:  ok
+./calc.at:1346: cat stderr
   | 1 + 2 * 3 + !- ++
 ./calc.at:1348:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1350: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+479. calc.at:1346:  ok
 stderr:
-
 ./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31428,19 +31477,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+
+483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc  ...
+./calc.at:1351:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
 ./calc.at:1348: cat stderr
 input:
   | 1 + 2 * 3 + !* ++
 ./calc.at:1348:  $PREPARSER ./calc  input
-484. calc.at:1353: testing Calculator %debug  ...
-./calc.at:1353: mv calc.y.tmp calc.y
-
-./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
 1.14: memory exhausted
 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.14: memory exhausted
+484. calc.at:1353: testing Calculator %debug  ...
+./calc.at:1353: mv calc.y.tmp calc.y
+
+./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 ./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31455,6 +31515,7 @@
 input:
   | (#) + (#) = 2222
 ./calc.at:1348:  $PREPARSER ./calc  input
+./calc.at:1351: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 stderr:
 1.2: syntax error: invalid character: '#'
 1.8: syntax error: invalid character: '#'
@@ -31520,9 +31581,6 @@
 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.6: syntax error: invalid character: '#'
-stderr:
-stdout:
-./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 ./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31534,44 +31592,14 @@
   }eg
 ' expout || exit 77
 ./calc.at:1348: cat stderr
-./calc.at:1350: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
 input:
   | (1 + 1) / (1 - 1)
 ./calc.at:1348:  $PREPARSER ./calc  input
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1350:  $PREPARSER ./calc  input
 stderr:
 1.11-17: error: null divisor
 ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 1.11-17: error: null divisor
-stderr:
-./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1348: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31582,38 +31610,34 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 2
-./calc.at:1350:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error, unexpected number
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1348: cat stderr
 stderr:
-1.3: syntax error, unexpected number
-481. calc.at:1348: ./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
- ok
-./calc.at:1350: cat stderr
-stderr:
-input:
 stdout:
-  | 1//2
-./calc.at:1350:  $PREPARSER ./calc  input
-./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./types.at:139:  $PREPARSER ./test
 stderr:
+481. calc.at:1348:  ok
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1351: "$PERL" -ne '
+485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc  ...
+./calc.at:1354:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
+./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+stdout:
+stderr:
+./types.at:139: ./check
+stdout:
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
+./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1350: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -31624,19 +31648,8 @@
         || /\t/
         )' calc.c calc.h
 
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1354: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 input:
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -31650,242 +31663,20 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1350: cat stderr
-stderr:
-input:
-  | error
 ./calc.at:1350:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.1: syntax error, unexpected invalid token
 ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.1: syntax error, unexpected invalid token
+./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 input:
   | 1 2
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc  ...
-./calc.at:1354:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
-1.3: syntax error, unexpected number
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1350: cat stderr
-1.3: syntax error, unexpected number
-./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 1 = 2 = 3
-./calc.at:1350:  $PREPARSER ./calc  input
-./calc.at:1351: cat stderr
-stderr:
-1.7: syntax error, unexpected '='
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | 1//2
-./calc.at:1351:  $PREPARSER ./calc  input
-1.7: syntax error, unexpected '='
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1350: cat stderr
-stderr:
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stdout:
-./calc.at:1351: cat stderr
-input:
-./types.at:139:  $PREPARSER ./test
-  | 
-  | +1
-./calc.at:1350:  $PREPARSER ./calc  input
-stderr:
-stderr:
-2.1: syntax error, unexpected '+'
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-2.1: syntax error, unexpected '+'
-./calc.at:1354: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | error
-./calc.at:1351:  $PREPARSER ./calc  input
-======== Testing with C++ standard flags: ''
-stderr:
-1.1: syntax error, unexpected invalid token
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1350: cat stderr
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-1.1: syntax error, unexpected invalid token
-./calc.at:1350:  $PREPARSER ./calc  /dev/null
-stderr:
-1.1: syntax error, unexpected end of file
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.1: syntax error, unexpected end of file
-./calc.at:1351: cat stderr
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1350: cat stderr
-  | 1 = 2 = 3
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-1.7: syntax error, unexpected '='
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1350:  $PREPARSER ./calc  input
-1.7: syntax error, unexpected '='
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1351: cat stderr
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 
-  | +1
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1350: cat stderr
-2.1: syntax error, unexpected '+'
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-2.1: syntax error, unexpected '+'
-input:
-  | (!!) + (1 2) = 1
 ./calc.at:1350:  $PREPARSER ./calc  input
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
+1.3: syntax error, unexpected number
 ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1351: cat stderr
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1351:  $PREPARSER ./calc  /dev/null
 stderr:
-1.1: syntax error, unexpected end of file
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error, unexpected number
 ./calc.at:1350: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -31897,253 +31688,11 @@
   }eg
 ' expout || exit 77
 stderr:
-1.1: syntax error, unexpected end of file
 ./calc.at:1350: cat stderr
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1351: cat stderr
-stderr:
-  | (- *) + (1 2) = 1
-./calc.at:1350:  $PREPARSER ./calc  input
 stdout:
 stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: ./check
-input:
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1350: cat stderr
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | (* *) + (*) + (*)
-./calc.at:1350:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1351: cat stderr
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1350: cat stderr
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1350:  $PREPARSER ./calc  input
-./calc.at:1351: cat stderr
-stderr:
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1350:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1351: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1350: cat stderr
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1350:  $PREPARSER ./calc  input
-./calc.at:1351: cat stderr
-stderr:
-1.14: memory exhausted
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-stderr:
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-1.14: memory exhausted
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1350: cat stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-input:
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (#) + (#) = 2222
-./calc.at:1350:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-stdout:
 ./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stdout:
 ./calc.at:1353: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -32155,8 +31704,23 @@
         || /\t/
         )' calc.c
 
-./calc.at:1351: cat stderr
+./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+input:
+  | 1//2
+./calc.at:1350:  $PREPARSER ./calc  input
 input:
+./calc.at:1351: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+stderr:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -32170,10 +31734,27 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1350: cat stderr
 ./calc.at:1353:  $PREPARSER ./calc  input
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 input:
+stderr:
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1351:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -33192,11 +32773,20 @@
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1351:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -34215,31 +33805,19 @@
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-  | (1 + #) = 1111
-./calc.at:1350:  $PREPARSER ./calc  input
-1.14: memory exhausted
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1350: cat stderr
+input:
 input:
-1.14: memory exhausted
-stderr:
   | 1 2
 ./calc.at:1353:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
+  | 1 2
+./calc.at:1351:  $PREPARSER ./calc  input
+input:
 stderr:
-./calc.at:1351: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+  | error
+./calc.at:1350:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -34261,7 +33839,12 @@
 Cleanup: discarding lookahead token "number" (1.1: 2)
 Stack now 0
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1351: cat stderr
+stderr:
+1.3: syntax error, unexpected number
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error, unexpected invalid token
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -34283,7 +33866,10 @@
 Stack now 0
 Cleanup: discarding lookahead token "number" (1.1: 2)
 Stack now 0
-./calc.at:1350: "$PERL" -pi -e 'use strict;
+stderr:
+1.3: syntax error, unexpected number
+1.1: syntax error, unexpected invalid token
+./calc.at:1351: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -34293,8 +33879,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-./calc.at:1353: "$PERL" -pi -e 'use strict;
+./calc.at:1350: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -34304,29 +33889,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (#) + (#) = 2222
-./calc.at:1351:  $PREPARSER ./calc  input
-./calc.at:1350: cat stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-input:
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (# + 1) = 1111
-./calc.at:1350:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1353: cat stderr
-1.2: syntax error: invalid character: '#'
-./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-stderr:
-input:
-1.2: syntax error: invalid character: '#'
-  | 1//2
-./calc.at:1353:  $PREPARSER ./calc  input
-./calc.at:1351: "$PERL" -pi -e 'use strict;
+./calc.at:1353: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -34336,19 +33899,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1350: cat stderr
 ./calc.at:1351: cat stderr
+./calc.at:1353: cat stderr
 input:
-./calc.at:1350: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+input:
+  | 1//2
+  | 1 = 2 = 3
+./calc.at:1350:  $PREPARSER ./calc  input
+./calc.at:1351:  $PREPARSER ./calc  input
+stderr:
+input:
+stderr:
+  | 1//2
+./calc.at:1353:  $PREPARSER ./calc  input
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+1.7: syntax error, unexpected '='
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+stderr:
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 Starting parse
 Entering state 0
 Stack now 0
@@ -34377,16 +33949,7 @@
 Cleanup: discarding lookahead token '/' (1.1: )
 Stack now 0
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + #) = 1111
-./calc.at:1351:  $PREPARSER ./calc  input
-./calc.at:1350: cat stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.6: syntax error: invalid character: '#'
-input:
+1.7: syntax error, unexpected '='
 ./calc.at:1351: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -34397,8 +33960,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (1 + # + 1) = 1111
-./calc.at:1350:  $PREPARSER ./calc  input
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1351: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -34426,12 +33999,36 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.1: )
 Stack now 0
+./calc.at:1350: cat stderr
+input:
+  | error
+./calc.at:1351:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
+1.1: syntax error, unexpected invalid token
+input:
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 
+  | +1
+./calc.at:1350:  $PREPARSER ./calc  input
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+2.1: syntax error, unexpected '+'
 ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error, unexpected invalid token
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1350: "$PERL" -pi -e 'use strict;
+2.1: syntax error, unexpected '+'
+./calc.at:1353: cat stderr
+./calc.at:1351: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -34443,7 +34040,7 @@
 ' expout || exit 77
 ./calc.at:1351: cat stderr
 input:
-./calc.at:1353: "$PERL" -pi -e 'use strict;
+./calc.at:1350: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -34453,26 +34050,41 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1350: cat stderr
-  | (# + 1) = 1111
+input:
+  | 1 = 2 = 3
 ./calc.at:1351:  $PREPARSER ./calc  input
 stderr:
-input:
-1.2: syntax error: invalid character: '#'
-  | (1 + 1) / (1 - 1)
-./calc.at:1350:  $PREPARSER ./calc  input
+  | error
+./calc.at:1353:  $PREPARSER ./calc  input
+./calc.at:1350: cat stderr
+1.7: syntax error, unexpected '='
 ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1353: cat stderr
+./calc.at:1350:  $PREPARSER ./calc  /dev/null
 stderr:
-1.11-17: error: null divisor
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+syntax error
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+Stack now 0
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.1: syntax error, unexpected end of file
 ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
+1.7: syntax error, unexpected '='
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+syntax error
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+Stack now 0
 stderr:
-input:
-1.11-17: error: null divisor
-  | error
-./calc.at:1353:  $PREPARSER ./calc  input
 ./calc.at:1351: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -34483,17 +34095,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-syntax error
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-Stack now 0
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1351: cat stderr
+1.1: syntax error, unexpected end of file
+input:
 ./calc.at:1350: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -34504,23 +34108,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-syntax error
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-Stack now 0
-input:
-./calc.at:1350: cat stderr
-  | (1 + # + 1) = 1111
+  | 
+  | +1
 ./calc.at:1351:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./calc.at:1353: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -34531,8 +34121,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-482. calc.at:1350:  ok
-1.6: syntax error: invalid character: '#'
+stderr:
+2.1: syntax error, unexpected '+'
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1350: cat stderr
+stderr:
+./calc.at:1353: cat stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+2.1: syntax error, unexpected '+'
+./calc.at:1350:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+input:
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1353:  $PREPARSER ./calc  input
 ./calc.at:1351: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -34543,16 +34151,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1353: cat stderr
-
-./calc.at:1351: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1353:  $PREPARSER ./calc  input
-input:
 stderr:
-  | (1 + 1) / (1 - 1)
-./calc.at:1351:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
 Starting parse
 Entering state 0
 Stack now 0
@@ -34593,10 +34198,7 @@
 Cleanup: discarding lookahead token '=' (1.1: )
 Stack now 0
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11-17: error: null divisor
-./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1351: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -34637,8 +34239,11 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.1: )
 Stack now 0
-1.11-17: error: null divisor
-./calc.at:1351: "$PERL" -pi -e 'use strict;
+./calc.at:1351:  $PREPARSER ./calc  /dev/null
+stderr:
+1.1: syntax error, unexpected end of file
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1350: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -34648,6 +34253,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+./calc.at:1350: cat stderr
+1.1: syntax error, unexpected end of file
 ./calc.at:1353: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -34658,22 +34266,54 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1350:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1351: cat stderr
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1353: cat stderr
-486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc  ...
-./calc.at:1355:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
-483. calc.at:1351:  ok
+stderr:
+input:
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1351:  $PREPARSER ./calc  input
+stderr:
 input:
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 
   | +1
 ./calc.at:1353:  $PREPARSER ./calc  input
+./calc.at:1350: cat stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -34700,9 +34340,16 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (1.1: )
 Stack now 0
-./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+input:
 stderr:
+  | (- *) + (1 2) = 1
+./calc.at:1350:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -34728,7 +34375,26 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (1.1: )
 Stack now 0
-
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1351: cat stderr
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
 ./calc.at:1353: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -34739,9 +34405,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1351:  $PREPARSER ./calc  input
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1353: cat stderr
+./calc.at:1350: cat stderr
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
 ./calc.at:1353:  $PREPARSER ./calc  /dev/null
+input:
 stderr:
+  | (* *) + (*) + (*)
+./calc.at:1350:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -34752,6 +34442,21 @@
 Stack now 0
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -34760,6 +34465,14 @@
 syntax error
 Cleanup: discarding lookahead token "end of input" (1.1: )
 Stack now 0
+./calc.at:1351: cat stderr
+stderr:
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1351:  $PREPARSER ./calc  input
 ./calc.at:1353: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -34770,18 +34483,47 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1350: cat stderr
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
 ./calc.at:1353: cat stderr
-487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc  ...
-./calc.at:1357:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
 input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1350:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1351: cat stderr
+stderr:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1353:  $PREPARSER ./calc  input
 stderr:
 Starting parse
@@ -35102,7 +34844,13 @@
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+input:
+  | (* *) + (*) + (*)
+./calc.at:1351:  $PREPARSER ./calc  input
+input:
+stderr:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1350:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -35421,7 +35169,27 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
-./calc.at:1355: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1353: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -35432,12 +35200,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1351: cat stderr
+./calc.at:1350: cat stderr
+input:
 ./calc.at:1353: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1351:  $PREPARSER ./calc  input
 input:
-./calc.at:1357: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+stderr:
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !* ++
+./calc.at:1350:  $PREPARSER ./calc  input
+input:
+stderr:
   | (!!) + (1 2) = 1
 ./calc.at:1353:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.14: memory exhausted
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -35580,6 +35373,12 @@
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stdout:
+1.14: memory exhausted
+input:
+stderr:
+./types.at:139:  $PREPARSER ./test
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -35720,6 +35519,38 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
+  | 1 + 2 * 3 + !- ++
+./calc.at:1351:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stdout:
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+stderr:
+./calc.at:1350: cat stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1354: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+input:
 ./calc.at:1353: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -35730,1807 +35561,189 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1353: cat stderr
+  | (#) + (#) = 2222
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+449. types.at:139:  ok
+./calc.at:1350:  $PREPARSER ./calc  input
+./calc.at:1351: cat stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1354:  $PREPARSER ./calc  input
+
+stderr:
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1353:  $PREPARSER ./calc  input
 stderr:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1351:  $PREPARSER ./calc  input
+./calc.at:1353: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 2
-Stack now 0 4 2
-Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 120):
-   $1 = token '-' (1.1: )
-   $2 = token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 8 21 4 12
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token "number" (1.1: 2)
-syntax error
-Error: popping nterm exp (1.1: 1)
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token "number" (1.1: 2)
-Error: discarding token "number" (1.1: 2)
-Error: popping token error (1.1: )
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 9 (line 120):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token number (1.13: 7)
+Shifting token number (1.13: 7)
 Entering state 1
 Stack now 0 8 19 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1)
-error: 2222 != 1
--> $$ = nterm exp (1.1: 2222)
+Next token is token '\n' (1.14-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 25
 Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
+-> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 2
-Stack now 0 4 2
-Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 120):
-   $1 = token '-' (1.1: )
-   $2 = token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token number (2.1: 1)
+Shifting token number (2.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
 Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 21 4
+Stack now 0 6 8 21
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token number (2.5: 2)
+Shifting token number (2.5: 2)
 Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token "number" (1.1: 2)
-syntax error
-Error: popping nterm exp (1.1: 1)
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token "number" (1.1: 2)
-Error: discarding token "number" (1.1: 2)
-Error: popping token error (1.1: )
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Stack now 0 6 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
 Entering state 30
-Stack now 0 8 21 30
+Stack now 0 6 8 21 30
 Reading a token
-Next token is token '=' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 22
+Stack now 0 6 8 21 30 22
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Stack now 0 6 8 21 30 22 2
+Reading a token
+Next token is token number (2.10: 3)
+Shifting token number (2.10: 3)
 Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1)
-error: 2222 != 1
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1353: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1353: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1353:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 3333)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 3333)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 3333)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 3333)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1353: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1353: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1353:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 122):
-   $1 = token '!' (1.1: )
-   $2 = token '+' (1.1: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 122):
-   $1 = token '!' (1.1: )
-   $2 = token '+' (1.1: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
-./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1353:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 123):
-   $1 = token '!' (1.1: )
-   $2 = token '-' (1.1: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 123):
-   $1 = token '!' (1.1: )
-   $2 = token '-' (1.1: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
-./calc.at:1353: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1353: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1353:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 124):
-   $1 = token '!' (1.1: )
-   $2 = token '*' (1.1: )
-memory exhausted
-Stack now 0 8 21
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 124):
-   $1 = token '!' (1.1: )
-   $2 = token '*' (1.1: )
-memory exhausted
-Stack now 0 8 21
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
-./calc.at:1353: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1353: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1353:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-syntax error: invalid character: '#'
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-syntax error: invalid character: '#'
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.1: 2222)
-Shifting token "number" (1.1: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 2222)
--> $$ = nterm exp (1.1: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 2222)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-syntax error: invalid character: '#'
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-syntax error: invalid character: '#'
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 8 21 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.1: )
-Reducing stack by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.1: 2222)
-Shifting token "number" (1.1: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 2222)
--> $$ = nterm exp (1.1: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 2222)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1353: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1353: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1353:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-syntax error: invalid character: '#'
-Error: popping token '+' (1.1: )
-Stack now 0 4 12
-Error: popping nterm exp (1.1: 1)
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-stdout:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-syntax error: invalid character: '#'
-Error: popping token '+' (1.1: )
-Stack now 0 4 12
-Error: popping nterm exp (1.1: 1)
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
-Error: popping token error (1.1: )
-Stack now 0 4
-Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1354: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-input:
-./calc.at:1353: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1354:  $PREPARSER ./calc  input
-./calc.at:1353: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.13: 7)
-Shifting token number (1.13: 7)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (2.1: 1)
-Shifting token number (2.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 21
-Stack now 0 6 8 21
-Reading a token
-Next token is token number (2.5: 2)
-Shifting token number (2.5: 2)
-Entering state 1
-Stack now 0 6 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 30
-Stack now 0 6 8 21 30
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 22
-Stack now 0 6 8 21 30 22
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Stack now 0 6 8 21 30 22 2
-Reading a token
-Next token is token number (2.10: 3)
-Shifting token number (2.10: 3)
-Entering state 1
-Stack now 0 6 8 21 30 22 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Stack now 0 6 8 21 30 22 2 10
+Stack now 0 6 8 21 30 22 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Stack now 0 6 8 21 30 22 2 10
 Reading a token
 Next token is token '=' (2.12: )
 Reducing stack by rule 11 (line 136):
@@ -38406,7 +36619,11 @@
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+stderr:
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -39425,10 +37642,22 @@
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | (# + 1) = 1111
+1.14: memory exhausted
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (- *) + (1 2) = 1
 ./calc.at:1353:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1350: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -39438,67 +37667,120 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-syntax error: invalid character: '#'
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 2
+Stack now 0 4 2
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
 Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
-Error: popping token error (1.1: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 120):
+   $1 = token '-' (1.1: )
+   $2 = token error (1.1: )
 Stack now 0 4
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.1: )
-Error: discarding token '+' (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Error: popping token error (1.1: )
 Stack now 0 4
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
 Next token is token "number" (1.1: 1)
-Error: discarding token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token "number" (1.1: 2)
+syntax error
+Error: popping nterm exp (1.1: 1)
+Stack now 0 8 21 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token "number" (1.1: 2)
+Error: discarding token "number" (1.1: 2)
 Error: popping token error (1.1: )
-Stack now 0 4
+Stack now 0 8 21 4
 Shifting token error (1.1: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
 Reading a token
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
 Entering state 26
-Stack now 0 4 11 26
+Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 119):
    $1 = token '(' (1.1: )
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Stack now 0 8
-Reading a token
 Next token is token '=' (1.1: )
 Shifting token '=' (1.1: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
 Next token is token '\n' (1.1: )
 Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $3 = nterm exp (1.1: 1)
+error: 2222 != 1
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Stack now 0 8
 Next token is token '\n' (1.1: )
@@ -39506,7 +37788,7 @@
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -39525,10 +37807,11 @@
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.14: memory exhausted
 input:
-  | 1 2
-./calc.at:1354:  $PREPARSER ./calc  input
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -39538,67 +37821,120 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-syntax error: invalid character: '#'
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 2
+Stack now 0 4 2
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
 Shifting token error (1.1: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
-Error: popping token error (1.1: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 120):
+   $1 = token '-' (1.1: )
+   $2 = token error (1.1: )
 Stack now 0 4
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.1: )
-Error: discarding token '+' (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Error: popping token error (1.1: )
 Stack now 0 4
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
 Next token is token "number" (1.1: 1)
-Error: discarding token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token "number" (1.1: 2)
+syntax error
+Error: popping nterm exp (1.1: 1)
+Stack now 0 8 21 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token "number" (1.1: 2)
+Error: discarding token "number" (1.1: 2)
 Error: popping token error (1.1: )
-Stack now 0 4
+Stack now 0 8 21 4
 Shifting token error (1.1: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
 Reading a token
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
 Entering state 26
-Stack now 0 4 11 26
+Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 119):
    $1 = token '(' (1.1: )
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Stack now 0 8
-Reading a token
 Next token is token '=' (1.1: )
 Shifting token '=' (1.1: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
 Next token is token '\n' (1.1: )
 Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $3 = nterm exp (1.1: 1)
+error: 2222 != 1
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Stack now 0 8
 Next token is token '\n' (1.1: )
@@ -39606,7 +37942,7 @@
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -39624,6 +37960,33 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
+  | (1 + #) = 1111
+./calc.at:1350:  $PREPARSER ./calc  input
+  | 1 2
+./calc.at:1354:  $PREPARSER ./calc  input
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc  ...
+./calc.at:1355:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
 stderr:
 Starting parse
 Entering state 0
@@ -39645,8 +38008,18 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
+./calc.at:1351: cat stderr
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+input:
+  | (#) + (#) = 2222
+./calc.at:1351:  $PREPARSER ./calc  input
 stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 Starting parse
 Entering state 0
 Stack now 0
@@ -39677,7 +38050,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1354: "$PERL" -pi -e 'use strict;
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1350: cat stderr
+./calc.at:1351: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -39688,10 +38075,25 @@
   }eg
 ' expout || exit 77
 ./calc.at:1353: cat stderr
-./calc.at:1354: cat stderr
+./calc.at:1354: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
-  | (1 + # + 1) = 1111
+input:
+  | (* *) + (*) + (*)
 ./calc.at:1353:  $PREPARSER ./calc  input
+./calc.at:1351: cat stderr
+  | (# + 1) = 1111
+./calc.at:1350:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1354: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -39702,86 +38104,123 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-syntax error: invalid character: '#'
-Error: popping token '+' (1.1: )
-Stack now 0 4 12
-Error: popping nterm exp (1.1: 1)
-Stack now 0 4
+Next token is token '*' (1.1: )
+syntax error
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Error: popping token error (1.1: )
 Stack now 0 4
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token '+' (1.1: )
-Error: discarding token '+' (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Error: popping token error (1.1: )
 Stack now 0 4
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token "number" (1.1: 1)
-Error: discarding token "number" (1.1: 1)
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Error: popping token error (1.1: )
-Stack now 0 4
+Stack now 0 8 21 4
 Shifting token error (1.1: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
 Reading a token
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
 Entering state 26
-Stack now 0 4 11 26
+Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 119):
    $1 = token '(' (1.1: )
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 8 21 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
-Entering state 28
-Stack now 0 8 19 28
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
 Next token is token '\n' (1.1: )
-Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '=' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '+' (1.1: )
    $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 3333)
 Entering state 8
 Stack now 0 8
 Next token is token '\n' (1.1: )
@@ -39789,7 +38228,7 @@
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 3333)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -39807,12 +38246,17 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
-input:
 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1//2
-./calc.at:1354:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+1.2: syntax error: invalid character: '#'
+  | (1 + #) = 1111
+./calc.at:1351:  $PREPARSER ./calc  input
 stderr:
 stderr:
+1.6: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -39822,86 +38266,123 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-syntax error: invalid character: '#'
-Error: popping token '+' (1.1: )
-Stack now 0 4 12
-Error: popping nterm exp (1.1: 1)
-Stack now 0 4
+Next token is token '*' (1.1: )
+syntax error
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
-Next token is token "invalid token" (1.1: )
-Error: discarding token "invalid token" (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Error: popping token error (1.1: )
 Stack now 0 4
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token '+' (1.1: )
-Error: discarding token '+' (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Error: popping token error (1.1: )
 Stack now 0 4
 Shifting token error (1.1: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token "number" (1.1: 1)
-Error: discarding token "number" (1.1: 1)
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Error: popping token error (1.1: )
-Stack now 0 4
+Stack now 0 8 21 4
 Shifting token error (1.1: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
 Reading a token
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
 Entering state 26
-Stack now 0 4 11 26
+Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 119):
    $1 = token '(' (1.1: )
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 8 21 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
-Entering state 28
-Stack now 0 8 19 28
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
 Next token is token '\n' (1.1: )
-Reducing stack by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '=' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '+' (1.1: )
    $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 3333)
 Entering state 8
 Stack now 0 8
 Next token is token '\n' (1.1: )
@@ -39909,7 +38390,7 @@
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 3333)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -39927,6 +38408,25 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+1.6: syntax error: invalid character: '#'
+  | 1//2
+./calc.at:1354:  $PREPARSER ./calc  input
+./calc.at:1350: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1355: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+./calc.at:1350: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -39955,7 +38455,31 @@
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (1 + # + 1) = 1111
+./calc.at:1350:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1351: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -39983,6 +38507,10 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1353: cat stderr
 ./calc.at:1354: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -39993,7 +38521,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1353: "$PERL" -pi -e 'use strict;
+stderr:
+input:
+1.6: syntax error: invalid character: '#'
+./calc.at:1354: cat stderr
+  | (# + 1) = 1111
+./calc.at:1351:  $PREPARSER ./calc  input
+input:
+./calc.at:1350: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -40003,159 +38538,204 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1354: cat stderr
-./calc.at:1353: cat stderr
-input:
-input:
-  | (1 + 1) / (1 - 1)
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1353:  $PREPARSER ./calc  input
-  | error
 stderr:
-./calc.at:1354:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+./calc.at:1350: cat stderr
+input:
+1.2: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 1
 Reducing stack by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 4 12
+Entering state 8
+Stack now 0 8
 Reading a token
 Next token is token '+' (1.1: )
 Shifting token '+' (1.1: )
 Entering state 21
-Stack now 0 4 12 21
+Stack now 0 8 21
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
 Entering state 1
-Stack now 0 4 12 21 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
 Entering state 30
-Stack now 0 4 12 21 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token ')' (1.1: )
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.1: )
 Reducing stack by rule 7 (line 98):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 118):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.1: 2)
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 2)
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token '/' (1.1: )
-Shifting token '/' (1.1: )
-Entering state 23
-Stack now 0 8 23
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Stack now 0 8 21 5
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 23 4
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 122):
+   $1 = token '!' (1.1: )
+   $2 = token '+' (1.1: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1354:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Stack now 0 8 23 4 1
+Stack now 0 1
 Reducing stack by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 8 23 4 12
+Entering state 8
+Stack now 0 8
 Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 20
-Stack now 0 8 23 4 12 20
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
 Entering state 1
-Stack now 0 8 23 4 12 20 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token ')' (1.1: )
-Reducing stack by rule 8 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '-' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 118):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.1: 0)
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 0)
-Entering state 32
-Stack now 0 8 23 32
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 10 (line 101):
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack by rule 9 (line 100):
    $1 = nterm exp (1.1: 2)
-   $2 = token '/' (1.1: )
-   $3 = nterm exp (1.1: 0)
-error: null divisor
--> $$ = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 122):
+   $1 = token '!' (1.1: )
+   $2 = token '+' (1.1: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
 stderr:
+./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr
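The `$EGREP -c -v 'Return for a new token:|LAC:' stderr` step counts how many trace lines remain once the "Return for a new token:" and "LAC:" (lookahead-correction) chatter is discarded, presumably so that the trace volume can be compared across parser variants that differ only in that chatter. A sketch against an invented two-line trace file:

  # The filter pattern is the one from the log ($EGREP is grep -E);
  # the trace contents here are invented.
  printf '%s\n' 'Entering state 0' 'LAC: example noise line (invented)' > trace
  grep -E -c -v 'Return for a new token:|LAC:' trace
  # Prints 1: only "Entering state 0" survives the filter and gets counted.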
+  | (1 + 1) / (1 - 1)
 Starting parse
 Entering state 0
 Stack now 0
@@ -40164,168 +38744,200 @@
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
-./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1350:  $PREPARSER ./calc  input
+./calc.at:1351: cat stderr
+stderr:
+1.11-17: error: null divisor
+./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 + !- ++
+stderr:
+./calc.at:1353:  $PREPARSER ./calc  input
+input:
+./calc.at:1354: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.11-17: error: null divisor
+stderr:
+  | (1 + # + 1) = 1111
+./calc.at:1351:  $PREPARSER ./calc  input
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 1
 Reducing stack by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 4 12
+Entering state 8
+Stack now 0 8
 Reading a token
 Next token is token '+' (1.1: )
 Shifting token '+' (1.1: )
 Entering state 21
-Stack now 0 4 12 21
+Stack now 0 8 21
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
 Entering state 1
-Stack now 0 4 12 21 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
 Entering state 30
-Stack now 0 4 12 21 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token ')' (1.1: )
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.1: )
 Reducing stack by rule 7 (line 98):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 118):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.1: 2)
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 2)
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token '/' (1.1: )
-Shifting token '/' (1.1: )
-Entering state 23
-Stack now 0 8 23
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Stack now 0 8 21 5
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 8 23 4
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 123):
+   $1 = token '!' (1.1: )
+   $2 = token '-' (1.1: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1354: cat stderr
+input:
+stderr:
+1.6: syntax error: invalid character: '#'
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Stack now 0 8 23 4 1
+Stack now 0 1
 Reducing stack by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 12
-Stack now 0 8 23 4 12
+Entering state 8
+Stack now 0 8
 Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 20
-Stack now 0 8 23 4 12 20
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
 Entering state 1
-Stack now 0 8 23 4 12 20 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token ')' (1.1: )
-Reducing stack by rule 8 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '-' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 118):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.1: 0)
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 0)
-Entering state 32
-Stack now 0 8 23 32
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack by rule 10 (line 101):
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack by rule 9 (line 100):
    $1 = nterm exp (1.1: 2)
-   $2 = token '/' (1.1: )
-   $3 = nterm exp (1.1: 0)
-error: null divisor
--> $$ = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Stack now 0 6
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Stack now 0 8 21 5
 Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-./calc.at:1353: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1354: "$PERL" -pi -e 'use strict;
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 123):
+   $1 = token '!' (1.1: )
+   $2 = token '-' (1.1: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+  | 1 = 2 = 3
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1354:  $PREPARSER ./calc  input
+./calc.at:1350: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -40335,12 +38947,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1354: cat stderr
-./calc.at:1353: cat stderr
-484. calc.at:1353: input:
- ok
-  | 1 = 2 = 3
-./calc.at:1354:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -40383,6 +38989,9 @@
 Stack now 0
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1350: cat stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -40422,7 +39031,27 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+482. calc.at:1350:  ok
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1354: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -40433,11 +39062,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1353: cat stderr
 ./calc.at:1354: cat stderr
+./calc.at:1351: cat stderr
+
+input:
 input:
   | 
   | +1
 ./calc.at:1354:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !* ++
+./calc.at:1353:  $PREPARSER ./calc  input
+input:
+stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1351:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -40465,7 +39104,92 @@
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 124):
+   $1 = token '!' (1.1: )
+   $2 = token '*' (1.1: )
+memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+stderr:
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.11-17: error: null divisor
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -40491,15 +39215,91 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
-488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc  ...
-./calc.at:1358:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 124):
+   $1 = token '!' (1.1: )
+   $2 = token '*' (1.1: )
+memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+stderr:
+1.11-17: error: null divisor
+stderr:
 ./calc.at:1354: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -40510,10 +39310,51 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc  ...
+stdout:
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1351: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1357:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
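The `if "$POSIXLY_CORRECT_IS_EXPORTED"` fragment emitted at the start of each new calc test decides how the freshly generated grammar is installed: when POSIXLY_CORRECT is exported, every line of calc.y.tmp tagged with a `/* !POSIX */` comment is stripped while copying it to calc.y; otherwise the file is simply renamed. A stand-alone sketch with an invented two-line grammar fragment:

  # calc.y.tmp contents and the variable value are invented; the
  # if/sed/mv logic mirrors the test script above.
  printf '%s\n' '%token NUM' '%glr-parser /* !POSIX */' > calc.y.tmp
  POSIXLY_CORRECT_IS_EXPORTED=true
  if "$POSIXLY_CORRECT_IS_EXPORTED"; then
    sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y   # drop the tagged lines
  else
    mv calc.y.tmp calc.y                              # keep the grammar as is
  fi
  cat calc.y                                          # left with just %token NUM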
+./types.at:139:  $PREPARSER ./test
 ./calc.at:1354: cat stderr
+stderr:
+./calc.at:1351: cat stderr
+./calc.at:1353: cat stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 ./calc.at:1354:  $PREPARSER ./calc  /dev/null
+  | (#) + (#) = 2222
+./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
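The bison invocations in these tests pin everything that could make the diagnostics vary between machines: COLUMNS=1000 fixes the wrap width, NO_TERM_HYPERLINKS=1 and --color=no keep terminal escape sequences out of the output (the hyperlink variable appears to be honoured by current bison releases), -fno-caret drops the source-excerpt carets, and -Wno-deprecated silences deprecation warnings. Spelled out as a stand-alone sketch (flags and file names are verbatim from the log; calc.y must already exist):

  COLUMNS=1000;         export COLUMNS              # fixed diagnostic width
  NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS   # plain-text diagnostics
  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y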
+./calc.at:1353:  $PREPARSER ./calc  input
+stderr:
+483. calc.at:1351:  ok
 stderr:
+450. types.at:139:  ok
 Starting parse
 Entering state 0
 Stack now 0
@@ -40523,6 +39364,250 @@
 Cleanup: discarding lookahead token end of file (1.1: )
 Stack now 0
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+syntax error: invalid character: '#'
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+syntax error: invalid character: '#'
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 8 21 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.1: 2222)
+Shifting token "number" (1.1: 2222)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 2222)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 6 (line 82):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 2222)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+syntax error: invalid character: '#'
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+syntax error: invalid character: '#'
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 8 21 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.1: 2222)
+Shifting token "number" (1.1: 2222)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 2222)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 6 (line 82):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 2222)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
 stderr:
 Starting parse
 Entering state 0
@@ -40542,9 +39627,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+
+
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1354: cat stderr
 input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1357: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
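Once bison has produced calc.c, the test links it with the separate lexer and driver sources used by this test (calc-lex.c, calc-main.c), using whatever compiler and flags the build was configured with. A hypothetical expansion, assuming a plain default toolchain rather than the real configure-time values:

  # Assumed values purely for illustration; the real ones come from the
  # testsuite configuration ($CC, $CFLAGS, ... in the log line above).
  CC=gcc CFLAGS='-g -O2' CPPFLAGS= LDFLAGS= LIBS=
  $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS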
+./calc.at:1353: cat stderr
 ./calc.at:1354:  $PREPARSER ./calc  input
 stderr:
 Starting parse
@@ -40865,7 +39964,19 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc  ...
+./calc.at:1358:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
+  | (1 + #) = 1111
 stderr:
+./calc.at:1353:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -41183,6 +40294,118 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+489. calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1360:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
+./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+syntax error: invalid character: '#'
+Error: popping token '+' (1.1: )
+Stack now 0 4 12
+Error: popping nterm exp (1.1: 1)
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
 ./calc.at:1354: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -41193,14 +40416,119 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+syntax error: invalid character: '#'
+Error: popping token '+' (1.1: )
+Stack now 0 4 12
+Error: popping nterm exp (1.1: 1)
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
 ./calc.at:1354: cat stderr
 input:
-./calc.at:1358: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
   | (!!) + (1 2) = 1
 ./calc.at:1354:  $PREPARSER ./calc  input
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-stderr:
-stdout:
 Starting parse
 Entering state 0
 Stack now 0
@@ -41342,7 +40670,6 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139:  $PREPARSER ./test
 stderr:
 Starting parse
 Entering state 0
@@ -41484,7 +40811,7 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
+./calc.at:1353: cat stderr
 ./calc.at:1354: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -41495,11 +40822,206 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1358: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+  | (# + 1) = 1111
+./calc.at:1353:  $PREPARSER ./calc  input
+./calc.at:1360: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 ./calc.at:1354: cat stderr
-======== Testing with C++ standard flags: ''
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+syntax error: invalid character: '#'
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.1: )
+Error: discarding token '+' (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token "number" (1.1: 1)
+Error: discarding token "number" (1.1: 1)
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+syntax error: invalid character: '#'
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.1: )
+Error: discarding token '+' (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token "number" (1.1: 1)
+Error: discarding token "number" (1.1: 1)
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
   | (- *) + (1 2) = 1
 ./calc.at:1354:  $PREPARSER ./calc  input
 stderr:
@@ -41653,6 +41175,16 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -41811,10 +41343,131 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1353: cat stderr
 ./calc.at:1354: cat stderr
 input:
+  | (1 + # + 1) = 1111
+./calc.at:1353:  $PREPARSER ./calc  input
+input:
+stderr:
   | (* *) + (*) + (*)
 ./calc.at:1354:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+syntax error: invalid character: '#'
+Error: popping token '+' (1.1: )
+Stack now 0 4 12
+Error: popping nterm exp (1.1: 1)
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.1: )
+Error: discarding token '+' (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token "number" (1.1: 1)
+Error: discarding token "number" (1.1: 1)
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -41968,6 +41621,120 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+syntax error: invalid character: '#'
+Error: popping token '+' (1.1: )
+Stack now 0 4 12
+Error: popping nterm exp (1.1: 1)
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.1: )
+Error: discarding token "invalid token" (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.1: )
+Error: discarding token '+' (1.1: )
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token "number" (1.1: 1)
+Error: discarding token "number" (1.1: 1)
+Error: popping token error (1.1: )
+Stack now 0 4
+Shifting token error (1.1: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
 stderr:
 Starting parse
 Entering state 0
@@ -42120,6 +41887,16 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1354: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -42131,12 +41908,15 @@
   }eg
 ' expout || exit 77
 ./calc.at:1354: cat stderr
+./calc.at:1353: cat stderr
 input:
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1354:  $PREPARSER ./calc  input
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1353:  $PREPARSER ./calc  input
 stderr:
 stderr:
-stdout:
 Starting parse
 Entering state 0
 Stack now 0
@@ -42218,158 +41998,450 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: ./check
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
+Stack now 0 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
 Entering state 21
-Stack now 0 8 21
+Stack now 0 4 12 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+Stack now 0 4 12 21 30
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 118):
+Next token is token ')' (1.1: )
+Reducing stack by rule 7 (line 98):
    $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 118):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.1: 2)
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 2)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
+Next token is token '/' (1.1: )
+Shifting token '/' (1.1: )
+Entering state 23
+Stack now 0 8 23
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 142):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1354:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 23 4
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
+Entering state 12
+Stack now 0 8 23 4 12
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 20
+Stack now 0 8 23 4 12 20
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 118):
+Next token is token ')' (1.1: )
+Reducing stack by rule 8 (line 99):
    $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
+   $2 = token '-' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 118):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.1: 0)
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 10 (line 101):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '/' (1.1: )
+   $3 = nterm exp (1.1: 0)
+error: null divisor
+-> $$ = nterm exp (1.1: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Reading a token
+Next token is token ')' (1.1: )
+Reducing stack by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 118):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.1: 2)
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.1: )
+Shifting token '/' (1.1: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Reading a token
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.1: )
+Reducing stack by rule 8 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '-' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 118):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.1: 0)
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack by rule 10 (line 101):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '/' (1.1: )
+   $3 = nterm exp (1.1: 0)
+error: null divisor
+-> $$ = nterm exp (1.1: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 120):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 142):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
+./calc.at:1353: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 + 2 * 3 + !- ++
+./calc.at:1354:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 120):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
 Next token is token '!' (1.13: )
 Shifting token '!' (1.13: )
@@ -42468,7 +42540,8 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1354: "$PERL" -pi -e 'use strict;
+./calc.at:1353: cat stderr
+484. calc.at:1353: ./calc.at:1354: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -42478,10 +42551,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+ ok
 ./calc.at:1354: cat stderr
 input:
   | 1 + 2 * 3 + !* ++
 ./calc.at:1354:  $PREPARSER ./calc  input
+
 stderr:
 Starting parse
 Entering state 0
@@ -42565,7 +42640,6 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 Starting parse
 Entering state 0
@@ -42659,10 +42733,33 @@
   }eg
 ' expout || exit 77
 ./calc.at:1354: cat stderr
+stderr:
+stdout:
 input:
+./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
   | (#) + (#) = 2222
 ./calc.at:1354:  $PREPARSER ./calc  input
+./calc.at:1355: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
 stderr:
+./calc.at:1362:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
 Starting parse
 Entering state 0
 Stack now 0
@@ -42785,6 +42882,22 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1355:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -42907,1025 +43020,7 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1354: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1354:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1354: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1354:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1354: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1354:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1354: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1354:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 138):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 119):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 138):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 121):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 138):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 119):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 138):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 121):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1354: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1354: cat stderr
-485. calc.at:1354:  ok
-
-stderr:
-stdout:
-./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1355: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-489. calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1360:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1355:  $PREPARSER ./calc  input
-./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
-stderr:
-stdout:
 Starting parse
 Entering state 0
 Stack now 0
@@ -44944,19 +44039,18 @@
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1354: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-./calc.at:1357: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
+./calc.at:1354: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -45976,23 +45070,233 @@
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1357:  $PREPARSER ./calc  input
+  | (1 + #) = 1111
+./calc.at:1354:  $PREPARSER ./calc  input
+stderr:
 input:
+./calc.at:1362: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
   | 1 2
 ./calc.at:1355:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token number (1.3: 2)
+Stack now 0
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 Starting parse
 Entering state 0
@@ -46014,8 +45318,618 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1354: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1354: cat stderr
+./calc.at:1355: cat stderr
+input:
+input:
+  | 1//2
+./calc.at:1355:  $PREPARSER ./calc  input
+  | (# + 1) = 1111
+stderr:
+./calc.at:1354:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1354: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1355: cat stderr
+./calc.at:1354: cat stderr
+input:
+  | error
+./calc.at:1355:  $PREPARSER ./calc  input
+input:
 stderr:
+  | (1 + # + 1) = 1111
+./calc.at:1354:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
 ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stdout:
+./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1354: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1355: cat stderr
+./calc.at:1357: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1357:  $PREPARSER ./calc  input
+input:
+  | 1 = 2 = 3
+./calc.at:1355:  $PREPARSER ./calc  input
+./calc.at:1354: cat stderr
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -47034,8 +46948,6 @@
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -47050,12 +46962,34 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
 Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
+Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -48074,22 +48008,6 @@
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1355: cat stderr
-  | 1 2
-./calc.at:1357:  $PREPARSER ./calc  input
-stderr:
-input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -48104,16 +48022,47 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
 Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
+Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1//2
-./calc.at:1355:  $PREPARSER ./calc  input
-stderr:
+input:
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1354:  $PREPARSER ./calc  input
+  | 1 2
+./calc.at:1357:  $PREPARSER ./calc  input
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -48135,121 +48084,151 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
+./calc.at:1355: cat stderr
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 4 12 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 138):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
 Entering state 23
 Stack now 0 8 23
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./calc.at:1357: cat stderr
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1355: cat stderr
-input:
-  | 1//2
-./calc.at:1357:  $PREPARSER ./calc  input
-stderr:
-input:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 8 23 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 119):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 138):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 121):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | error
-./calc.at:1355:  $PREPARSER ./calc  input
-stderr:
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -48265,165 +48244,159 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
+Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1357: cat stderr
 input:
-./calc.at:1355: cat stderr
-  | error
-./calc.at:1357:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-input:
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 = 2 = 3
+  | 
+  | +1
 ./calc.at:1355:  $PREPARSER ./calc  input
-stderr:
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 4 12 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 138):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 8 23 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 8 23 4 12 20 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 119):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 138):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 121):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
 ./calc.at:1357: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -48434,68 +48407,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1357: cat stderr
-input:
-./calc.at:1355: cat stderr
-  | 1 = 2 = 3
-./calc.at:1357:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-input:
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 
-  | +1
-./calc.at:1355:  $PREPARSER ./calc  input
-stderr:
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -48522,46 +48433,18 @@
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
 ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
+./calc.at:1354: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
+./calc.at:1357: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -48587,21 +48470,8 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1357: cat stderr
+./calc.at:1354: cat stderr
 input:
-  | 
-  | +1
-./calc.at:1357:  $PREPARSER ./calc  input
 ./calc.at:1355: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -48612,62 +48482,78 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | 1//2
+./calc.at:1357:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
+Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
 ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1355: cat stderr
+485. calc.at:1354:  ok
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
+Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
 ./calc.at:1355:  $PREPARSER ./calc  /dev/null
 stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
+Stack now 0
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1357: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -48678,18 +48564,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
-Stack now 0
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1357: cat stderr
 stderr:
-./calc.at:1357:  $PREPARSER ./calc  /dev/null
+stderr:
+
+stdout:
 Starting parse
 Entering state 0
 Stack now 0
@@ -48698,7 +48576,19 @@
 1.1: syntax error, unexpected end of file
 Cleanup: discarding lookahead token end of file (1.1: )
 Stack now 0
-stderr:
+./calc.at:1357: cat stderr
+./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1358: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
 ./calc.at:1355: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -48709,341 +48599,145 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | error
+./calc.at:1357:  $PREPARSER ./calc  input
+stderr:
+input:
+./calc.at:1355: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
 ./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1358:  $PREPARSER ./calc  input
+input:
 stderr:
-./calc.at:1355: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
-input:
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1355:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1357: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.7: 1)
-Shifting token number (1.7: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Stack now 0 8 21 4 12 21
-Reading a token
-Next token is token number (1.11: 1)
-Shifting token number (1.11: 1)
-Entering state 1
-Stack now 0 8 21 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21 4 12 21
-Reading a token
-Next token is token number (1.15: 1)
-Shifting token number (1.15: 1)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 8 21 4 12 21 1
+Stack now 0 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Stack now 0 8 21 4 12 21
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Stack now 0 8 21 4 12
-Error: popping nterm exp (1.7-15: 3)
-Stack now 0 8 21 4
-Shifting token error (1.7-18: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Error: popping token error (1.23: )
-Stack now 0 8 21 4
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Error: popping token error (1.23: )
-Stack now 0 8 21 4
-Shifting token error (1.23-25: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Error: popping token error (1.23-25: )
-Stack now 0 8 21 4
-Shifting token error (1.23-27: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Return for a new token:
 Reading a token
-Next token is token '+' (1.30: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.33: 1)
-Shifting token number (1.33: 1)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Stack now 0 8 21 4 12
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
 Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
 Entering state 22
-Stack now 0 8 21 4 12 22
+Stack now 0 8 21 30 22
+Return for a new token:
 Reading a token
-Next token is token number (1.37: 2)
-Shifting token number (1.37: 2)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
 Entering state 1
-Stack now 0 8 21 4 12 22 1
+Stack now 0 8 21 30 22 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.37: 2)
--> $$ = nterm exp (1.37: 2)
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
 Entering state 31
-Stack now 0 8 21 4 12 22 31
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
-Stack now 0 8 21 4 12 22
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Stack now 0 8 21 4 12
-Error: popping nterm exp (1.33-37: 2)
-Stack now 0 8 21 4
-Shifting token error (1.33-41: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Error: popping token error (1.33-41: )
-Stack now 0 8 21 4
-Shifting token error (1.33-41: )
-Entering state 11
-Stack now 0 8 21 4 11
+Stack now 0 8 21 30 22 31
+Return for a new token:
 Reading a token
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
+Next token is token '=' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.46: 1)
-Shifting token number (1.46: 1)
+Next token is token number (1.13: 7)
+Shifting token number (1.13: 7)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.46: 1)
--> $$ = nterm exp (1.46: 1)
+   $1 = token number (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
 Entering state 28
 Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.47-2.0: )
+Next token is token '\n' (1.14-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -49052,438 +48746,1114 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1357:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.7: 1)
-Shifting token number (1.7: 1)
+Next token is token number (2.1: 1)
+Shifting token number (2.1: 1)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 6 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Stack now 0 8 21 4 12
+   $1 = token number (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
 Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
 Entering state 21
-Stack now 0 8 21 4 12 21
+Stack now 0 6 8 21
+Return for a new token:
 Reading a token
-Next token is token number (1.11: 1)
-Shifting token number (1.11: 1)
+Next token is token number (2.5: 2)
+Shifting token number (2.5: 2)
 Entering state 1
-Stack now 0 8 21 4 12 21 1
+Stack now 0 6 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11: 1)
--> $$ = nterm exp (1.11: 1)
+   $1 = token number (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
 Entering state 30
-Stack now 0 8 21 4 12 21 30
+Stack now 0 6 8 21 30
+Return for a new token:
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21 4 12 21
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 22
+Stack now 0 6 8 21 30 22
+Return for a new token:
 Reading a token
-Next token is token number (1.15: 1)
-Shifting token number (1.15: 1)
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Stack now 0 6 8 21 30 22 2
+Return for a new token:
+Reading a token
+Next token is token number (2.10: 3)
+Shifting token number (2.10: 3)
 Entering state 1
-Stack now 0 8 21 4 12 21 1
+Stack now 0 6 8 21 30 22 2 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Stack now 0 8 21 4 12 21
+   $1 = token number (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Stack now 0 6 8 21 30 22 2 10
+Return for a new token:
 Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Stack now 0 8 21 4 12
-Error: popping nterm exp (1.7-15: 3)
-Stack now 0 8 21 4
-Shifting token error (1.7-18: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
+Next token is token '=' (2.12: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 31
+Stack now 0 6 8 21 30 22 31
+Next token is token '=' (2.12: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
 Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
+Stack now 0 6 8 21 30
+Next token is token '=' (2.12: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
 Entering state 8
-Stack now 0 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Error: popping token error (1.23: )
-Stack now 0 8 21 4
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Error: popping token error (1.23: )
-Stack now 0 8 21 4
-Shifting token error (1.23-25: )
-Entering state 11
-Stack now 0 8 21 4 11
+Stack now 0 6 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
 Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Error: popping token error (1.23-25: )
-Stack now 0 8 21 4
-Shifting token error (1.23-27: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Stack now 0 6 8 19 2
+Return for a new token:
 Reading a token
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Next token is token number (2.15: 5)
+Shifting token number (2.15: 5)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Return for a new token:
 Reading a token
-Next token is token '+' (1.30: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
 Entering state 8
-Stack now 0 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 21
-Stack now 0 8 21
+Stack now 0 6 8
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Stack now 0 8 21 4
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
 Reading a token
-Next token is token number (1.33: 1)
-Shifting token number (1.33: 1)
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Stack now 0 6 2
+Return for a new token:
+Reading a token
+Next token is token number (4.2: 1)
+Shifting token number (4.2: 1)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 6 2 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Stack now 0 8 21 4 12
+   $1 = token number (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Stack now 0 6 2 10
+Return for a new token:
 Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 22
-Stack now 0 8 21 4 12 22
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 24
+Stack now 0 6 2 10 24
+Return for a new token:
 Reading a token
-Next token is token number (1.37: 2)
-Shifting token number (1.37: 2)
+Next token is token number (4.4: 2)
+Shifting token number (4.4: 2)
 Entering state 1
-Stack now 0 8 21 4 12 22 1
+Stack now 0 6 2 10 24 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 31
-Stack now 0 8 21 4 12 22 31
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
-Stack now 0 8 21 4 12 22
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Stack now 0 8 21 4 12
-Error: popping nterm exp (1.33-37: 2)
-Stack now 0 8 21 4
-Shifting token error (1.33-41: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Error: popping token error (1.33-41: )
-Stack now 0 8 21 4
-Shifting token error (1.33-41: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 30
-Stack now 0 8 21 30
+   $1 = token number (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 33
+Stack now 0 6 2 10 24 33
+Return for a new token:
 Reading a token
-Next token is token '=' (1.44: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
+Next token is token '=' (4.6: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (4.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
 Entering state 8
-Stack now 0 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Stack now 0 6 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
 Entering state 19
-Stack now 0 8 19
+Stack now 0 6 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.46: 1)
-Shifting token number (1.46: 1)
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Stack now 0 6 8 19 2
+Return for a new token:
+Reading a token
+Next token is token number (4.9: 1)
+Shifting token number (4.9: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 6 8 19 2 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.46: 1)
--> $$ = nterm exp (1.46: 1)
-Entering state 28
-Stack now 0 8 19 28
+   $1 = token number (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.47-2.0: )
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (4.10-5.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
 Entering state 25
-Stack now 0 8 25
+Stack now 0 6 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
 Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 8 21
+Stack now 0 6 4
+Return for a new token:
 Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Stack now 0 8 21 4
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Stack now 0 6 4 2
+Return for a new token:
 Reading a token
-Next token is token number (1.7: 1)
-Shifting token number (1.7: 1)
+Next token is token number (5.3: 1)
+Shifting token number (5.3: 1)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 6 4 2 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.7: 1)
--> $$ = nterm exp (1.7: 1)
+   $1 = token number (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
+Stack now 0 6 4 2 10
+Return for a new token:
+Reading a token
+Next token is token ')' (5.4: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
 Entering state 12
-Stack now 0 8 21 4 12
+Stack now 0 6 4 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 27
+Stack now 0 6 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
 Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Stack now 0 8 21 4 12 21
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 24
+Stack now 0 6 8 24
+Return for a new token:
 Reading a token
-Next token is token number (1.11: 1)
-Shifting token number (1.11: 1)
+Next token is token number (5.6: 2)
+Shifting token number (5.6: 2)
 Entering state 1
-Stack now 0 8 21 4 12 21 1
+Stack now 0 6 8 24 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
+   $1 = token number (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 33
+Stack now 0 6 8 24 33
+Return for a new token:
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21 4 12 21
+Next token is token '=' (5.8: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.15: 1)
-Shifting token number (1.15: 1)
+Next token is token number (5.10: 1)
+Shifting token number (5.10: 1)
 Entering state 1
-Stack now 0 8 21 4 12 21 1
+Stack now 0 6 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Stack now 0 8 21 4 12 21
+   $1 = token number (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
+Entering state 28
+Stack now 0 6 8 19 28
+Return for a new token:
 Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Stack now 0 8 21 4 12
+Next token is token '\n' (5.11-6.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
+Stack now 0 6 2
+Return for a new token:
+Reading a token
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Stack now 0 6 2 2
+Return for a new token:
+Reading a token
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Stack now 0 6 2 2 2
+Return for a new token:
+Reading a token
+Next token is token number (7.4: 1)
+Shifting token number (7.4: 1)
+Entering state 1
+Stack now 0 6 2 2 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Stack now 0 6 2 2 2 10
+Return for a new token:
+Reading a token
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Stack now 0 6 2 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
+Stack now 0 6 8 19 2
+Return for a new token:
+Reading a token
+Next token is token number (7.9: 1)
+Shifting token number (7.9: 1)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Return for a new token:
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token number (9.1: 1)
+Shifting token number (9.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
+Reading a token
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 20
+Stack now 0 6 8 20
+Return for a new token:
+Reading a token
+Next token is token number (9.5: 2)
+Shifting token number (9.5: 2)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 29
+Stack now 0 6 8 20 29
+Return for a new token:
+Reading a token
+Next token is token '-' (9.7: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 20
+Stack now 0 6 8 20
+Return for a new token:
+Reading a token
+Next token is token number (9.9: 3)
+Shifting token number (9.9: 3)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 29
+Stack now 0 6 8 20 29
+Return for a new token:
+Reading a token
+Next token is token '=' (9.11: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
+Stack now 0 6 8 19 2
+Return for a new token:
+Reading a token
+Next token is token number (9.14: 4)
+Shifting token number (9.14: 4)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Return for a new token:
+Reading a token
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token number (10.1: 1)
+Shifting token number (10.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
+Reading a token
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 20
+Stack now 0 6 8 20
+Return for a new token:
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
+Entering state 4
+Stack now 0 6 8 20 4
+Return for a new token:
+Reading a token
+Next token is token number (10.6: 2)
+Shifting token number (10.6: 2)
+Entering state 1
+Stack now 0 6 8 20 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
+Entering state 12
+Stack now 0 6 8 20 4 12
+Return for a new token:
+Reading a token
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
+Entering state 20
+Stack now 0 6 8 20 4 12 20
+Return for a new token:
+Reading a token
+Next token is token number (10.10: 3)
+Shifting token number (10.10: 3)
+Entering state 1
+Stack now 0 6 8 20 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
+Entering state 29
+Stack now 0 6 8 20 4 12 20 29
+Return for a new token:
+Reading a token
+Next token is token ')' (10.11: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
+Entering state 12
+Stack now 0 6 8 20 4 12
+Next token is token ')' (10.11: )
+Shifting token ')' (10.11: )
+Entering state 27
+Stack now 0 6 8 20 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 29
+Stack now 0 6 8 20 29
+Return for a new token:
+Reading a token
+Next token is token '=' (10.13: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token number (10.15: 2)
+Shifting token number (10.15: 2)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
+Entering state 28
+Stack now 0 6 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '\n' (10.16-11.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token number (12.1: 2)
+Shifting token number (12.1: 2)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
+Reading a token
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 24
+Stack now 0 6 8 24
+Return for a new token:
+Reading a token
+Next token is token number (12.3: 2)
+Shifting token number (12.3: 2)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 33
+Stack now 0 6 8 24 33
+Return for a new token:
+Reading a token
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 24
+Stack now 0 6 8 24 33 24
+Return for a new token:
+Reading a token
+Next token is token number (12.5: 3)
+Shifting token number (12.5: 3)
+Entering state 1
+Stack now 0 6 8 24 33 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 33
+Stack now 0 6 8 24 33 24 33
+Return for a new token:
+Reading a token
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 33
+Stack now 0 6 8 24 33
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token number (12.9-11: 256)
+Shifting token number (12.9-11: 256)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 28
+Stack now 0 6 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '\n' (12.12-13.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
+Entering state 4
+Stack now 0 6 4
+Return for a new token:
+Reading a token
+Next token is token number (13.2: 2)
+Shifting token number (13.2: 2)
+Entering state 1
+Stack now 0 6 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
+Entering state 12
+Stack now 0 6 4 12
+Return for a new token:
+Reading a token
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 24
+Stack now 0 6 4 12 24
+Return for a new token:
+Reading a token
+Next token is token number (13.4: 2)
+Shifting token number (13.4: 2)
+Entering state 1
+Stack now 0 6 4 12 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 33
+Stack now 0 6 4 12 24 33
+Return for a new token:
+Reading a token
+Next token is token ')' (13.5: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
+Entering state 12
+Stack now 0 6 4 12
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 27
+Stack now 0 6 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
+Reading a token
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 24
+Stack now 0 6 8 24
+Return for a new token:
+Reading a token
+Next token is token number (13.7: 3)
+Shifting token number (13.7: 3)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 33
+Stack now 0 6 8 24 33
+Return for a new token:
+Reading a token
+Next token is token '=' (13.9: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token number (13.11-12: 64)
+Shifting token number (13.11-12: 64)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
+Entering state 28
+Stack now 0 6 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '\n' (13.13-14.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Now at end of input.
+Shifting token end of file (14.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1363:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.7: 1)
+Shifting token number (1.7: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Reading a token
+Next token is token number (1.11: 1)
+Shifting token number (1.11: 1)
+Entering state 1
+Stack now 0 8 21 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Reading a token
+Next token is token number (1.15: 1)
+Shifting token number (1.15: 1)
+Entering state 1
+Stack now 0 8 21 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Stack now 0 8 21 4 12
 Error: popping nterm exp (1.7-15: 3)
 Stack now 0 8 21 4
 Shifting token error (1.7-18: )
@@ -49493,7 +49863,7 @@
 Shifting token ')' (1.18: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.6: )
    $2 = token error (1.7-18: )
    $3 = token ')' (1.18: )
@@ -49502,7 +49872,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '+' (1.20: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-2: 1111)
    $2 = token '+' (1.4: )
    $3 = nterm exp (1.6-18: 1111)
@@ -49552,7 +49922,7 @@
 Shifting token ')' (1.28: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.22: )
    $2 = token error (1.23-27: )
    $3 = token ')' (1.28: )
@@ -49561,7 +49931,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '+' (1.30: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-18: 2222)
    $2 = token '+' (1.20: )
    $3 = nterm exp (1.22-28: 1111)
@@ -49604,7 +49974,7 @@
 Stack now 0 8 21 4 12 22 31
 Reading a token
 Next token is token '*' (1.39: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 120):
    $1 = nterm exp (1.33: 1)
    $2 = token '*' (1.35: )
    $3 = nterm exp (1.37: 2)
@@ -49637,7 +50007,7 @@
 Shifting token ')' (1.42: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.32: )
    $2 = token error (1.33-41: )
    $3 = token ')' (1.42: )
@@ -49646,7 +50016,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '=' (1.44: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-28: 3333)
    $2 = token '+' (1.30: )
    $3 = nterm exp (1.32-42: 1111)
@@ -49700,8 +50070,7 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1355: "$PERL" -pi -e 'use strict;
+./calc.at:1357: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -49711,8 +50080,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
-./calc.at:1355: cat stderr
+stderr:
+./calc.at:1357: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -49731,7 +50103,7 @@
 Shifting token ')' (1.2: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
    $2 = token error (1.2: )
    $3 = token ')' (1.2: )
@@ -49775,7 +50147,7 @@
 Stack now 0 8 21 4 12 21 30
 Reading a token
 Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.7: 1)
    $2 = token '+' (1.9: )
    $3 = nterm exp (1.11: 1)
@@ -49798,7 +50170,7 @@
 Stack now 0 8 21 4 12 21 30
 Reading a token
 Next token is token '+' (1.17: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.7-11: 2)
    $2 = token '+' (1.13: )
    $3 = nterm exp (1.15: 1)
@@ -49823,7 +50195,7 @@
 Shifting token ')' (1.18: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.6: )
    $2 = token error (1.7-18: )
    $3 = token ')' (1.18: )
@@ -49832,7 +50204,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '+' (1.20: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-2: 1111)
    $2 = token '+' (1.4: )
    $3 = nterm exp (1.6-18: 1111)
@@ -49882,7 +50254,7 @@
 Shifting token ')' (1.28: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.22: )
    $2 = token error (1.23-27: )
    $3 = token ')' (1.28: )
@@ -49891,7 +50263,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '+' (1.30: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-18: 2222)
    $2 = token '+' (1.20: )
    $3 = nterm exp (1.22-28: 1111)
@@ -49934,7 +50306,7 @@
 Stack now 0 8 21 4 12 22 31
 Reading a token
 Next token is token '*' (1.39: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 120):
    $1 = nterm exp (1.33: 1)
    $2 = token '*' (1.35: )
    $3 = nterm exp (1.37: 2)
@@ -49967,7 +50339,7 @@
 Shifting token ')' (1.42: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.32: )
    $2 = token error (1.33-41: )
    $3 = token ')' (1.42: )
@@ -49976,7 +50348,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '=' (1.44: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-28: 3333)
    $2 = token '+' (1.30: )
    $3 = nterm exp (1.32-42: 1111)
@@ -50030,144 +50402,103 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1355:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Stack now 0 4 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Stack now 0 4 5 16
-Reducing stack by rule 16 (line 141):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Stack now 0 4
-Shifting token error (1.2-3: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.9: 1)
-Shifting token number (1.9: 1)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Stack now 0 8 21 4 12
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
 Reading a token
-Next token is token number (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.11: 2)
-Error: discarding token number (1.11: 2)
-Error: popping token error (1.9-11: )
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Return for a new token:
 Reading a token
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Return for a new token:
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.13: 7)
+Shifting token number (1.13: 7)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token number (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
 Entering state 28
 Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.14-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -50176,157 +50507,1003 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1357: cat stderr
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token number (2.1: 1)
+Shifting token number (2.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
+Entering state 21
+Stack now 0 6 8 21
+Return for a new token:
+Reading a token
+Next token is token number (2.5: 2)
+Shifting token number (2.5: 2)
+Entering state 1
+Stack now 0 6 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
+Entering state 30
+Stack now 0 6 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 22
+Stack now 0 6 8 21 30 22
+Return for a new token:
+Reading a token
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Stack now 0 6 8 21 30 22 2
+Return for a new token:
+Reading a token
+Next token is token number (2.10: 3)
+Shifting token number (2.10: 3)
+Entering state 1
+Stack now 0 6 8 21 30 22 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Stack now 0 6 8 21 30 22 2 10
+Return for a new token:
+Reading a token
+Next token is token '=' (2.12: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 31
+Stack now 0 6 8 21 30 22 31
+Next token is token '=' (2.12: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
+Entering state 30
+Stack now 0 6 8 21 30
+Next token is token '=' (2.12: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Stack now 0 6 8 19 2
+Return for a new token:
+Reading a token
+Next token is token number (2.15: 5)
+Shifting token number (2.15: 5)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Return for a new token:
+Reading a token
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Stack now 0 6 2
+Return for a new token:
+Reading a token
+Next token is token number (4.2: 1)
+Shifting token number (4.2: 1)
+Entering state 1
+Stack now 0 6 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Stack now 0 6 2 10
+Return for a new token:
+Reading a token
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 24
+Stack now 0 6 2 10 24
+Return for a new token:
+Reading a token
+Next token is token number (4.4: 2)
+Shifting token number (4.4: 2)
+Entering state 1
+Stack now 0 6 2 10 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 33
+Stack now 0 6 2 10 24 33
+Return for a new token:
+Reading a token
+Next token is token '=' (4.6: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (4.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Stack now 0 6 8 19 2
+Return for a new token:
+Reading a token
+Next token is token number (4.9: 1)
+Shifting token number (4.9: 1)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Return for a new token:
+Reading a token
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
 Entering state 4
-Stack now 0 4
+Stack now 0 6 4
+Return for a new token:
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Stack now 0 4 5
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Stack now 0 6 4 2
+Return for a new token:
 Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Stack now 0 4 5 16
-Reducing stack by rule 16 (line 141):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Stack now 0 4
-Shifting token error (1.2-3: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (5.3: 1)
+Shifting token number (5.3: 1)
+Entering state 1
+Stack now 0 6 4 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
+Stack now 0 6 4 2 10
+Return for a new token:
 Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+Next token is token ')' (5.4: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
+Entering state 12
+Stack now 0 6 4 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 27
+Stack now 0 6 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
+Return for a new token:
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Stack now 0 8 21
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 24
+Stack now 0 6 8 24
+Return for a new token:
 Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
+Next token is token number (5.6: 2)
+Shifting token number (5.6: 2)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 33
+Stack now 0 6 8 24 33
+Return for a new token:
+Reading a token
+Next token is token '=' (5.8: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token number (5.10: 1)
+Shifting token number (5.10: 1)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
+Entering state 28
+Stack now 0 6 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '\n' (5.11-6.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
+Stack now 0 6 2
+Return for a new token:
+Reading a token
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Stack now 0 6 2 2
+Return for a new token:
+Reading a token
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Stack now 0 6 2 2 2
+Return for a new token:
+Reading a token
+Next token is token number (7.4: 1)
+Shifting token number (7.4: 1)
+Entering state 1
+Stack now 0 6 2 2 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Stack now 0 6 2 2 2 10
+Return for a new token:
+Reading a token
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Stack now 0 6 2 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
+Stack now 0 6 8 19 2
+Return for a new token:
+Reading a token
+Next token is token number (7.9: 1)
+Shifting token number (7.9: 1)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Return for a new token:
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token number (9.1: 1)
+Shifting token number (9.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
+Reading a token
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 20
+Stack now 0 6 8 20
+Return for a new token:
+Reading a token
+Next token is token number (9.5: 2)
+Shifting token number (9.5: 2)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 29
+Stack now 0 6 8 20 29
+Return for a new token:
+Reading a token
+Next token is token '-' (9.7: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 20
+Stack now 0 6 8 20
+Return for a new token:
+Reading a token
+Next token is token number (9.9: 3)
+Shifting token number (9.9: 3)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 29
+Stack now 0 6 8 20 29
+Return for a new token:
+Reading a token
+Next token is token '=' (9.11: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
+Stack now 0 6 8 19 2
+Return for a new token:
+Reading a token
+Next token is token number (9.14: 4)
+Shifting token number (9.14: 4)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Return for a new token:
+Reading a token
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token number (10.1: 1)
+Shifting token number (10.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
+Reading a token
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 20
+Stack now 0 6 8 20
+Return for a new token:
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
 Entering state 4
-Stack now 0 8 21 4
+Stack now 0 6 8 20 4
+Return for a new token:
 Reading a token
-Next token is token number (1.9: 1)
-Shifting token number (1.9: 1)
+Next token is token number (10.6: 2)
+Shifting token number (10.6: 2)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 6 8 20 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 1)
--> $$ = nterm exp (1.9: 1)
+   $1 = token number (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
 Entering state 12
-Stack now 0 8 21 4 12
+Stack now 0 6 8 20 4 12
+Return for a new token:
 Reading a token
-Next token is token number (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.11: 2)
-Error: discarding token number (1.11: 2)
-Error: popping token error (1.9-11: )
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
+Entering state 20
+Stack now 0 6 8 20 4 12 20
+Return for a new token:
 Reading a token
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Next token is token number (10.10: 3)
+Shifting token number (10.10: 3)
+Entering state 1
+Stack now 0 6 8 20 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
+Entering state 29
+Stack now 0 6 8 20 4 12 20 29
+Return for a new token:
 Reading a token
-Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
+Next token is token ')' (10.11: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
+Entering state 12
+Stack now 0 6 8 20 4 12
+Next token is token ')' (10.11: )
+Shifting token ')' (10.11: )
+Entering state 27
+Stack now 0 6 8 20 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 29
+Stack now 0 6 8 20 29
+Return for a new token:
+Reading a token
+Next token is token '=' (10.13: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
 Entering state 8
-Stack now 0 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Stack now 0 6 8
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
 Entering state 19
-Stack now 0 8 19
+Stack now 0 6 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (10.15: 2)
+Shifting token number (10.15: 2)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 6 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token number (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
 Entering state 28
-Stack now 0 8 19 28
+Stack now 0 6 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (10.16-11.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
 Entering state 25
-Stack now 0 8 25
+Stack now 0 6 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token number (12.1: 2)
+Shifting token number (12.1: 2)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
+Reading a token
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 24
+Stack now 0 6 8 24
+Return for a new token:
+Reading a token
+Next token is token number (12.3: 2)
+Shifting token number (12.3: 2)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 33
+Stack now 0 6 8 24 33
+Return for a new token:
+Reading a token
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 24
+Stack now 0 6 8 24 33 24
+Return for a new token:
+Reading a token
+Next token is token number (12.5: 3)
+Shifting token number (12.5: 3)
+Entering state 1
+Stack now 0 6 8 24 33 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 33
+Stack now 0 6 8 24 33 24 33
+Return for a new token:
+Reading a token
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 33
+Stack now 0 6 8 24 33
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token number (12.9-11: 256)
+Shifting token number (12.9-11: 256)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 28
+Stack now 0 6 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '\n' (12.12-13.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
+Entering state 4
+Stack now 0 6 4
+Return for a new token:
+Reading a token
+Next token is token number (13.2: 2)
+Shifting token number (13.2: 2)
+Entering state 1
+Stack now 0 6 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
+Entering state 12
+Stack now 0 6 4 12
+Return for a new token:
+Reading a token
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 24
+Stack now 0 6 4 12 24
+Return for a new token:
+Reading a token
+Next token is token number (13.4: 2)
+Shifting token number (13.4: 2)
+Entering state 1
+Stack now 0 6 4 12 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 33
+Stack now 0 6 4 12 24 33
+Return for a new token:
+Reading a token
+Next token is token ')' (13.5: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
+Entering state 12
+Stack now 0 6 4 12
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 27
+Stack now 0 6 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
+Entering state 8
+Stack now 0 6 8
+Return for a new token:
+Reading a token
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 24
+Stack now 0 6 8 24
+Return for a new token:
+Reading a token
+Next token is token number (13.7: 3)
+Shifting token number (13.7: 3)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 33
+Stack now 0 6 8 24 33
+Return for a new token:
+Reading a token
+Next token is token '=' (13.9: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
+Entering state 19
+Stack now 0 6 8 19
+Return for a new token:
+Reading a token
+Next token is token number (13.11-12: 64)
+Shifting token number (13.11-12: 64)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
+Entering state 28
+Stack now 0 6 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '\n' (13.13-14.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
 Now at end of input.
-Shifting token end of file (2.1: )
+Shifting token end of file (14.1: )
 Entering state 17
 Stack now 0 6 17
 Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Cleanup: popping token end of file (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
+./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
 ./calc.at:1355: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -50338,10 +51515,145 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (!!) + (1 2) = 1
-./calc.at:1357:  $PREPARSER ./calc  input
+  | 1 2
+./calc.at:1358:  $PREPARSER ./calc  input
+input:
 stderr:
 ./calc.at:1355: cat stderr
+  | 1 = 2 = 3
+./calc.at:1357:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Return for a new token:
+Reading a token
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token number (1.3: 2)
+Stack now 0
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Return for a new token:
+Reading a token
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token number (1.3: 2)
+Stack now 0
+stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1355:  $PREPARSER ./calc  input
+stderr:
+stdout:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+stderr:
+./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 Starting parse
 Entering state 0
 Stack now 0
@@ -50360,7 +51672,7 @@
 Shifting token '!' (1.3: )
 Entering state 16
 Stack now 0 4 5 16
-Reducing stack by rule 16 (line 129):
+Reducing stack by rule 16 (line 141):
    $1 = token '!' (1.2: )
    $2 = token '!' (1.3: )
 Stack now 0 4
@@ -50372,7 +51684,7 @@
 Shifting token ')' (1.4: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-3: )
    $3 = token ')' (1.4: )
@@ -50419,7 +51731,7 @@
 Shifting token ')' (1.12: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.8: )
    $2 = token error (1.9-11: )
    $3 = token ')' (1.12: )
@@ -50428,7 +51740,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-4: 1111)
    $2 = token '+' (1.6: )
    $3 = nterm exp (1.8-12: 1111)
@@ -50482,8 +51794,31 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1358: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1360: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+./calc.at:1363: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 stderr:
+./calc.at:1358: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -50502,7 +51837,7 @@
 Shifting token '!' (1.3: )
 Entering state 16
 Stack now 0 4 5 16
-Reducing stack by rule 16 (line 129):
+Reducing stack by rule 16 (line 141):
    $1 = token '!' (1.2: )
    $2 = token '!' (1.3: )
 Stack now 0 4
@@ -50514,7 +51849,7 @@
 Shifting token ')' (1.4: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-3: )
    $3 = token ')' (1.4: )
@@ -50561,7 +51896,7 @@
 Shifting token ')' (1.12: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.8: )
    $2 = token error (1.9-11: )
    $3 = token ')' (1.12: )
@@ -50570,7 +51905,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-4: 1111)
    $2 = token '+' (1.6: )
    $3 = nterm exp (1.8-12: 1111)
@@ -50624,10 +51959,86 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1357: cat stderr
+input:
+./calc.at:1355: cat stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1360:  $PREPARSER ./calc  input
+  | 1//2
+./calc.at:1358:  $PREPARSER ./calc  input
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1355:  $PREPARSER ./calc  input
 stderr:
+input:
+  | 
+  | +1
+./calc.at:1357:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Return for a new token:
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Return for a new token:
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -50776,150 +52187,99 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 140):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 21 4 12
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
+Next token is token number (1.13: 7)
+Shifting token number (1.13: 7)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token number (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (1.14-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -50929,317 +52289,964 @@
 Entering state 6
 Stack now 0 6
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: cat stderr
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1355: cat stderr
-  | (- *) + (1 2) = 1
-input:
-./calc.at:1357:  $PREPARSER ./calc  input
-stderr:
-  | (* *) + (*) + (*)
-./calc.at:1355:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token number (2.1: 1)
+Shifting token number (2.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
+Entering state 8
+Stack now 0 6 8
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
+Entering state 21
+Stack now 0 6 8 21
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
+Next token is token number (2.5: 2)
+Shifting token number (2.5: 2)
+Entering state 1
+Stack now 0 6 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
+Entering state 30
+Stack now 0 6 8 21 30
+Reading a token
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 22
+Stack now 0 6 8 21 30 22
+Reading a token
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
 Entering state 2
-Stack now 0 4 2
+Stack now 0 6 8 21 30 22 2
 Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 128):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (2.10: 3)
+Shifting token number (2.10: 3)
+Entering state 1
+Stack now 0 6 8 21 30 22 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Stack now 0 6 8 21 30 22 2 10
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token '=' (2.12: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 31
+Stack now 0 6 8 21 30 22 31
+Next token is token '=' (2.12: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
+Entering state 30
+Stack now 0 6 8 21 30
+Next token is token '=' (2.12: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 19
+Stack now 0 6 8 19
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Stack now 0 6 8 19 2
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
+Next token is token number (2.15: 5)
+Shifting token number (2.15: 5)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Reading a token
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Stack now 0 6 2
+Reading a token
+Next token is token number (4.2: 1)
+Shifting token number (4.2: 1)
+Entering state 1
+Stack now 0 6 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Stack now 0 6 2 10
+Reading a token
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 24
+Stack now 0 6 2 10 24
+Reading a token
+Next token is token number (4.4: 2)
+Shifting token number (4.4: 2)
+Entering state 1
+Stack now 0 6 2 10 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 33
+Stack now 0 6 2 10 24 33
+Reading a token
+Next token is token '=' (4.6: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (4.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Stack now 0 6 8 19 2
+Reading a token
+Next token is token number (4.9: 1)
+Shifting token number (4.9: 1)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Reading a token
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
 Entering state 4
-Stack now 0 8 21 4
+Stack now 0 6 4
 Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Stack now 0 6 4 2
+Reading a token
+Next token is token number (5.3: 1)
+Shifting token number (5.3: 1)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 6 4 2 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
+   $1 = token number (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
+Stack now 0 6 4 2 10
+Reading a token
+Next token is token ')' (5.4: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
 Entering state 12
-Stack now 0 8 21 4 12
+Stack now 0 6 4 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 27
+Stack now 0 6 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
+Entering state 8
+Stack now 0 6 8
 Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 24
+Stack now 0 6 8 24
 Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Next token is token number (5.6: 2)
+Shifting token number (5.6: 2)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 33
+Stack now 0 6 8 24 33
 Reading a token
-Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+Next token is token '=' (5.8: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
 Entering state 8
-Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Stack now 0 6 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
 Entering state 19
-Stack now 0 8 19
+Stack now 0 6 8 19
 Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
+Next token is token number (5.10: 1)
+Shifting token number (5.10: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 6 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token number (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
 Entering state 28
-Stack now 0 8 19 28
+Stack now 0 6 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (5.11-6.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
 Entering state 25
-Stack now 0 8 25
+Stack now 0 6 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
+Stack now 0 6 2
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Stack now 0 6 2 2
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Stack now 0 6 2 2 2
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token number (7.4: 1)
+Shifting token number (7.4: 1)
+Entering state 1
+Stack now 0 6 2 2 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Stack now 0 6 2 2 2 10
+Reading a token
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Stack now 0 6 2 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 19
+Stack now 0 6 8 19
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
+Stack now 0 6 8 19 2
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
+Next token is token number (7.9: 1)
+Shifting token number (7.9: 1)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token number (9.1: 1)
+Shifting token number (9.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
+Entering state 8
+Stack now 0 6 8
+Reading a token
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 20
+Stack now 0 6 8 20
+Reading a token
+Next token is token number (9.5: 2)
+Shifting token number (9.5: 2)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 29
+Stack now 0 6 8 20 29
+Reading a token
+Next token is token '-' (9.7: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 20
+Stack now 0 6 8 20
+Reading a token
+Next token is token number (9.9: 3)
+Shifting token number (9.9: 3)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 29
+Stack now 0 6 8 20 29
+Reading a token
+Next token is token '=' (9.11: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
+Stack now 0 6 8 19 2
+Reading a token
+Next token is token number (9.14: 4)
+Shifting token number (9.14: 4)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Reading a token
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token number (10.1: 1)
+Shifting token number (10.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
+Stack now 0 6 8
+Reading a token
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 20
+Stack now 0 6 8 20
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
 Entering state 4
-Stack now 0 8 21 4
+Stack now 0 6 8 20 4
 Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 21 4
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token number (10.6: 2)
+Shifting token number (10.6: 2)
+Entering state 1
+Stack now 0 6 8 20 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
+Entering state 12
+Stack now 0 6 8 20 4 12
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
+Entering state 20
+Stack now 0 6 8 20 4 12 20
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+Next token is token number (10.10: 3)
+Shifting token number (10.10: 3)
+Entering state 1
+Stack now 0 6 8 20 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
+Entering state 29
+Stack now 0 6 8 20 4 12 20 29
+Reading a token
+Next token is token ')' (10.11: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
+Entering state 12
+Stack now 0 6 8 20 4 12
+Next token is token ')' (10.11: )
+Shifting token ')' (10.11: )
+Entering state 27
+Stack now 0 6 8 20 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 29
+Stack now 0 6 8 20 29
+Reading a token
+Next token is token '=' (10.13: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
 Entering state 8
-Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21
+Stack now 0 6 8
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
+Entering state 19
+Stack now 0 6 8 19
 Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
+Next token is token number (10.15: 2)
+Shifting token number (10.15: 2)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
+Entering state 28
+Stack now 0 6 8 19 28
+Reading a token
+Next token is token '\n' (10.16-11.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token number (12.1: 2)
+Shifting token number (12.1: 2)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
+Entering state 8
+Stack now 0 6 8
+Reading a token
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 24
+Stack now 0 6 8 24
+Reading a token
+Next token is token number (12.3: 2)
+Shifting token number (12.3: 2)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 33
+Stack now 0 6 8 24 33
+Reading a token
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 24
+Stack now 0 6 8 24 33 24
+Reading a token
+Next token is token number (12.5: 3)
+Shifting token number (12.5: 3)
+Entering state 1
+Stack now 0 6 8 24 33 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 33
+Stack now 0 6 8 24 33 24 33
+Reading a token
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 33
+Stack now 0 6 8 24 33
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token number (12.9-11: 256)
+Shifting token number (12.9-11: 256)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 28
+Stack now 0 6 8 19 28
+Reading a token
+Next token is token '\n' (12.12-13.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
 Entering state 4
-Stack now 0 8 21 4
+Stack now 0 6 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 21 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token number (13.2: 2)
+Shifting token number (13.2: 2)
+Entering state 1
+Stack now 0 6 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
+Entering state 12
+Stack now 0 6 4 12
 Reading a token
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 24
+Stack now 0 6 4 12 24
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token number (13.4: 2)
+Shifting token number (13.4: 2)
+Entering state 1
+Stack now 0 6 4 12 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 33
+Stack now 0 6 4 12 24 33
+Reading a token
+Next token is token ')' (13.5: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
+Entering state 12
+Stack now 0 6 4 12
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 27
+Stack now 0 6 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Stack now 0 6 8
+Reading a token
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 24
+Stack now 0 6 8 24
+Reading a token
+Next token is token number (13.7: 3)
+Shifting token number (13.7: 3)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 33
+Stack now 0 6 8 24 33
+Reading a token
+Next token is token '=' (13.9: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token number (13.11-12: 64)
+Shifting token number (13.11-12: 64)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
+Entering state 28
+Stack now 0 6 8 19 28
+Reading a token
+Next token is token '\n' (13.13-14.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
 Entering state 25
-Stack now 0 8 25
+Stack now 0 6 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (14.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Return for a new token:
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Return for a new token:
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (1.1-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -51249,15 +53256,13 @@
 Entering state 6
 Stack now 0 6
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -51277,7 +53282,7 @@
 Shifting token error (1.4: )
 Entering state 9
 Stack now 0 4 2 9
-Reducing stack by rule 15 (line 128):
+Reducing stack by rule 15 (line 140):
    $1 = token '-' (1.2: )
    $2 = token error (1.4: )
 Stack now 0 4
@@ -51296,7 +53301,7 @@
 Shifting token ')' (1.5: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-4: )
    $3 = token ')' (1.5: )
@@ -51343,7 +53348,7 @@
 Shifting token ')' (1.13: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.9: )
    $2 = token error (1.10-12: )
    $3 = token ')' (1.13: )
@@ -51352,7 +53357,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-5: 1111)
    $2 = token '+' (1.7: )
    $3 = nterm exp (1.9-13: 1111)
@@ -51406,186 +53411,7 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 21 4
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 21 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1355: cat stderr
-./calc.at:1357: cat stderr
-input:
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1355:  $PREPARSER ./calc  input
-  | (* *) + (*) + (*)
-./calc.at:1357:  $PREPARSER ./calc  input
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
 Starting parse
@@ -51632,178 +53458,52 @@
 Entering state 31
 Stack now 0 8 21 30 22 31
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 120):
+Next token is token '=' (1.11: )
+Reducing stack by rule 9 (line 114):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 118):
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 112):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
 -> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 142):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 21 4
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 21 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Next token is token number (1.13: 7)
+Shifting token number (1.13: 7)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token '\n' (1.14-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -51813,97 +53513,1228 @@
 Entering state 6
 Stack now 0 6
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token number (2.1: 1)
+Shifting token number (2.1: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 6 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+   $1 = token number (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
 Entering state 21
-Stack now 0 8 21
+Stack now 0 6 8 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token number (2.5: 2)
+Shifting token number (2.5: 2)
 Entering state 1
-Stack now 0 8 21 1
+Stack now 0 6 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
 Entering state 30
-Stack now 0 8 21 30
+Stack now 0 6 8 21 30
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
 Entering state 22
-Stack now 0 8 21 30 22
+Stack now 0 6 8 21 30 22
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Stack now 0 6 8 21 30 22 2
+Reading a token
+Next token is token number (2.10: 3)
+Shifting token number (2.10: 3)
 Entering state 1
-Stack now 0 8 21 30 22 1
+Stack now 0 6 8 21 30 22 2 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
+   $1 = token number (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Stack now 0 6 8 21 30 22 2 10
+Reading a token
+Next token is token '=' (2.12: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
 Entering state 31
-Stack now 0 8 21 30 22 31
+Stack now 0 6 8 21 30 22 31
+Next token is token '=' (2.12: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
+Entering state 30
+Stack now 0 6 8 21 30
+Next token is token '=' (2.12: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 19
+Stack now 0 6 8 19
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Stack now 0 6 8 19 2
+Reading a token
+Next token is token number (2.15: 5)
+Shifting token number (2.15: 5)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Reading a token
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Stack now 0 6 2
+Reading a token
+Next token is token number (4.2: 1)
+Shifting token number (4.2: 1)
+Entering state 1
+Stack now 0 6 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Stack now 0 6 2 10
+Reading a token
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 24
+Stack now 0 6 2 10 24
+Reading a token
+Next token is token number (4.4: 2)
+Shifting token number (4.4: 2)
+Entering state 1
+Stack now 0 6 2 10 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 33
+Stack now 0 6 2 10 24 33
+Reading a token
+Next token is token '=' (4.6: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (4.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Stack now 0 6 8 19 2
+Reading a token
+Next token is token number (4.9: 1)
+Shifting token number (4.9: 1)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Reading a token
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
+Entering state 4
+Stack now 0 6 4
+Reading a token
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Stack now 0 6 4 2
+Reading a token
+Next token is token number (5.3: 1)
+Shifting token number (5.3: 1)
+Entering state 1
+Stack now 0 6 4 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
+Stack now 0 6 4 2 10
+Reading a token
+Next token is token ')' (5.4: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
+Entering state 12
+Stack now 0 6 4 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 27
+Stack now 0 6 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Reading a token
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 24
+Stack now 0 6 8 24
+Reading a token
+Next token is token number (5.6: 2)
+Shifting token number (5.6: 2)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 33
+Stack now 0 6 8 24 33
+Reading a token
+Next token is token '=' (5.8: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token number (5.10: 1)
+Shifting token number (5.10: 1)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
+Entering state 28
+Stack now 0 6 8 19 28
+Reading a token
+Next token is token '\n' (5.11-6.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
+Stack now 0 6 2
+Reading a token
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Stack now 0 6 2 2
+Reading a token
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Stack now 0 6 2 2 2
+Reading a token
+Next token is token number (7.4: 1)
+Shifting token number (7.4: 1)
+Entering state 1
+Stack now 0 6 2 2 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Stack now 0 6 2 2 2 10
+Reading a token
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Stack now 0 6 2 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
+Stack now 0 6 8 19 2
+Reading a token
+Next token is token number (7.9: 1)
+Shifting token number (7.9: 1)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token number (9.1: 1)
+Shifting token number (9.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
+Entering state 8
+Stack now 0 6 8
+Reading a token
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 20
+Stack now 0 6 8 20
+Reading a token
+Next token is token number (9.5: 2)
+Shifting token number (9.5: 2)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 29
+Stack now 0 6 8 20 29
+Reading a token
+Next token is token '-' (9.7: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 20
+Stack now 0 6 8 20
+Reading a token
+Next token is token number (9.9: 3)
+Shifting token number (9.9: 3)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 29
+Stack now 0 6 8 20 29
+Reading a token
+Next token is token '=' (9.11: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
+Stack now 0 6 8 19 2
+Reading a token
+Next token is token number (9.14: 4)
+Shifting token number (9.14: 4)
+Entering state 1
+Stack now 0 6 8 19 2 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
+Stack now 0 6 8 19 2 10
+Reading a token
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 11 (line 124):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 28
+Stack now 0 6 8 19 28
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token number (10.1: 1)
+Shifting token number (10.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
+Stack now 0 6 8
+Reading a token
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 20
+Stack now 0 6 8 20
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
+Entering state 4
+Stack now 0 6 8 20 4
+Reading a token
+Next token is token number (10.6: 2)
+Shifting token number (10.6: 2)
+Entering state 1
+Stack now 0 6 8 20 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
+Entering state 12
+Stack now 0 6 8 20 4 12
+Reading a token
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
+Entering state 20
+Stack now 0 6 8 20 4 12 20
+Reading a token
+Next token is token number (10.10: 3)
+Shifting token number (10.10: 3)
+Entering state 1
+Stack now 0 6 8 20 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
+Entering state 29
+Stack now 0 6 8 20 4 12 20 29
+Reading a token
+Next token is token ')' (10.11: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
+Entering state 12
+Stack now 0 6 8 20 4 12
+Next token is token ')' (10.11: )
+Shifting token ')' (10.11: )
+Entering state 27
+Stack now 0 6 8 20 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 29
+Stack now 0 6 8 20 29
+Reading a token
+Next token is token '=' (10.13: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token number (10.15: 2)
+Shifting token number (10.15: 2)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
+Entering state 28
+Stack now 0 6 8 19 28
+Reading a token
+Next token is token '\n' (10.16-11.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token number (12.1: 2)
+Shifting token number (12.1: 2)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
+Entering state 8
+Stack now 0 6 8
+Reading a token
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 24
+Stack now 0 6 8 24
+Reading a token
+Next token is token number (12.3: 2)
+Shifting token number (12.3: 2)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 33
+Stack now 0 6 8 24 33
+Reading a token
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 24
+Stack now 0 6 8 24 33 24
+Reading a token
+Next token is token number (12.5: 3)
+Shifting token number (12.5: 3)
+Entering state 1
+Stack now 0 6 8 24 33 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 33
+Stack now 0 6 8 24 33 24 33
+Reading a token
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 33
+Stack now 0 6 8 24 33
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token number (12.9-11: 256)
+Shifting token number (12.9-11: 256)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 28
+Stack now 0 6 8 19 28
+Reading a token
+Next token is token '\n' (12.12-13.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
+Entering state 4
+Stack now 0 6 4
+Reading a token
+Next token is token number (13.2: 2)
+Shifting token number (13.2: 2)
+Entering state 1
+Stack now 0 6 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
+Entering state 12
+Stack now 0 6 4 12
+Reading a token
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 24
+Stack now 0 6 4 12 24
+Reading a token
+Next token is token number (13.4: 2)
+Shifting token number (13.4: 2)
+Entering state 1
+Stack now 0 6 4 12 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 33
+Stack now 0 6 4 12 24 33
+Reading a token
+Next token is token ')' (13.5: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
+Entering state 12
+Stack now 0 6 4 12
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 27
+Stack now 0 6 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
+Entering state 8
+Stack now 0 6 8
+Reading a token
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 24
+Stack now 0 6 8 24
+Reading a token
+Next token is token number (13.7: 3)
+Shifting token number (13.7: 3)
+Entering state 1
+Stack now 0 6 8 24 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 33
+Stack now 0 6 8 24 33
+Reading a token
+Next token is token '=' (13.9: )
+Reducing stack by rule 12 (line 125):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token number (13.11-12: 64)
+Shifting token number (13.11-12: 64)
+Entering state 1
+Stack now 0 6 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
+Entering state 28
+Stack now 0 6 8 19 28
+Reading a token
+Next token is token '\n' (13.13-14.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
+Entering state 25
+Stack now 0 6 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 18
+Stack now 0 6 18
+Reducing stack by rule 2 (line 92):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (14.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1358: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1355: cat stderr
+  | 1 2
+./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1358: cat stderr
+stderr:
+./calc.at:1357: cat stderr
+input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token number (1.3: 2)
+Stack now 0
+input:
+./calc.at:1357:  $PREPARSER ./calc  /dev/null
+  | error
+./calc.at:1358:  $PREPARSER ./calc  input
+  | (* *) + (*) + (*)
+./calc.at:1355:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
+Stack now 0
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+stderr:
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token number (1.3: 2)
+Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
+Stack now 0 8 21 4
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Next token is token '+' (1.11: )
+Reading a token
+Next token is token '+' (1.13: )
 Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 142):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 21 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
+Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -51938,7 +54769,7 @@
 Shifting token ')' (1.5: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-4: )
    $3 = token ')' (1.5: )
@@ -51973,7 +54804,7 @@
 Shifting token ')' (1.11: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.9: )
    $2 = token error (1.10: )
    $3 = token ')' (1.11: )
@@ -51982,7 +54813,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-5: 1111)
    $2 = token '+' (1.7: )
    $3 = nterm exp (1.9-11: 1111)
@@ -52016,7 +54847,7 @@
 Shifting token ')' (1.17: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.15: )
    $2 = token error (1.16: )
    $3 = token ')' (1.17: )
@@ -52025,7 +54856,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-11: 2222)
    $2 = token '+' (1.13: )
    $3 = nterm exp (1.15-17: 1111)
@@ -52055,10 +54886,6 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1355:  $PREPARSER ./calc  input
-stderr:
 ./calc.at:1357: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -52069,6 +54896,50 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1358: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1360: cat stderr
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1358: cat stderr
+./calc.at:1357: cat stderr
+./calc.at:1355: cat stderr
+input:
+  | 1//2
+./calc.at:1360:  $PREPARSER ./calc  input
+stderr:
+input:
+input:
+  | 1 = 2 = 3
+./calc.at:1358:  $PREPARSER ./calc  input
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1357:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -52083,75 +54954,343 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+input:
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.7: 1)
+Shifting token number (1.7: 1)
 Entering state 1
-Stack now 0 8 21 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Reading a token
+Next token is token number (1.11: 1)
+Shifting token number (1.11: 1)
+Entering state 1
+Stack now 0 8 21 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
 Entering state 30
-Stack now 0 8 21 30
+Stack now 0 8 21 4 12 21 30
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21 4 12 21
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token number (1.15: 1)
+Shifting token number (1.15: 1)
 Entering state 1
-Stack now 0 8 21 30 22 1
+Stack now 0 8 21 4 12 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+   $1 = token number (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token '+' (1.17: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Stack now 0 8 21 4 12
+Error: popping nterm exp (1.7-15: 3)
+Stack now 0 8 21 4
+Shifting token error (1.7-18: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 143):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Error: popping token error (1.23: )
+Stack now 0 8 21 4
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Error: popping token error (1.23: )
+Stack now 0 8 21 4
+Shifting token error (1.23-25: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Error: popping token error (1.23-25: )
+Stack now 0 8 21 4
+Shifting token error (1.23-27: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
 Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1357: cat stderr
-stderr:
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.33: 1)
+Shifting token number (1.33: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
+Stack now 0 8 21 4 12 22
+Reading a token
+Next token is token number (1.37: 2)
+Shifting token number (1.37: 2)
+Entering state 1
+Stack now 0 8 21 4 12 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
+Stack now 0 8 21 4 12 22 31
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
+Stack now 0 8 21 4 12 22
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Stack now 0 8 21 4 12
+Error: popping nterm exp (1.33-37: 2)
+Stack now 0 8 21 4
+Shifting token error (1.33-41: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Error: popping token error (1.33-41: )
+Stack now 0 8 21 4
+Shifting token error (1.33-41: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.46: 1)
+Shifting token number (1.46: 1)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1355:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -52165,88 +55304,357 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Return for a new token:
 Reading a token
 Next token is token number (1.5: 2)
 Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 21 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
    $1 = token number (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.7: 1)
+Shifting token number (1.7: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Reading a token
+Next token is token number (1.11: 1)
+Shifting token number (1.11: 1)
+Entering state 1
+Stack now 0 8 21 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
 Entering state 30
-Stack now 0 8 21 30
+Stack now 0 8 21 4 12 21 30
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21 4 12 21
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token number (1.15: 1)
+Shifting token number (1.15: 1)
 Entering state 1
-Stack now 0 8 21 30 22 1
+Stack now 0 8 21 4 12 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+   $1 = token number (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token '+' (1.17: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Stack now 0 8 21 4 12
+Error: popping nterm exp (1.7-15: 3)
+Stack now 0 8 21 4
+Shifting token error (1.7-18: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 143):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Error: popping token error (1.23: )
+Stack now 0 8 21 4
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Error: popping token error (1.23: )
+Stack now 0 8 21 4
+Shifting token error (1.23-25: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Error: popping token error (1.23-25: )
+Stack now 0 8 21 4
+Shifting token error (1.23-27: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
 Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1357:  $PREPARSER ./calc  input
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.33: 1)
+Shifting token number (1.33: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
+Stack now 0 8 21 4 12 22
+Reading a token
+Next token is token number (1.37: 2)
+Shifting token number (1.37: 2)
+Entering state 1
+Stack now 0 8 21 4 12 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
+Stack now 0 8 21 4 12 22 31
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
+Stack now 0 8 21 4 12 22
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Stack now 0 8 21 4 12
+Error: popping nterm exp (1.33-37: 2)
+Stack now 0 8 21 4
+Shifting token error (1.33-41: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Error: popping token error (1.33-41: )
+Stack now 0 8 21 4
+Shifting token error (1.33-41: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.46: 1)
+Shifting token number (1.46: 1)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+stderr:
 stderr:
-./calc.at:1355: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -52292,7 +55700,7 @@
 Stack now 0 8 21 30 22 31
 Reading a token
 Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 120):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
@@ -52300,7 +55708,7 @@
 Entering state 30
 Stack now 0 8 21 30
 Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
@@ -52321,14 +55729,12 @@
 Shifting token '+' (1.14: )
 Entering state 14
 Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 130):
+Reducing stack by rule 17 (line 142):
    $1 = token '!' (1.13: )
    $2 = token '+' (1.14: )
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -52342,81 +55748,85 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Return for a new token:
 Reading a token
 Next token is token number (1.5: 2)
 Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 21 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
    $1 = token number (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
+Entering state 28
+Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 8 21 30 22 1
+Stack now 0 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1355:  $PREPARSER ./calc  input
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1357: cat stderr
+./calc.at:1358: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1357:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -52487,19 +55897,38 @@
 Entering state 5
 Stack now 0 8 21 5
 Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 144):
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 142):
    $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
+   $2 = token '+' (1.14: )
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1358: cat stderr
+input:
+./calc.at:1360: cat stderr
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1355:  $PREPARSER ./calc  input
+  | (!!) + (1 2) = 1
+./calc.at:1357:  $PREPARSER ./calc  input
 stderr:
+stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -52545,7 +55974,7 @@
 Stack now 0 8 21 30 22 31
 Reading a token
 Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 120):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
@@ -52553,7 +55982,7 @@
 Entering state 30
 Stack now 0 8 21 30
 Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
@@ -52574,96 +56003,339 @@
 Shifting token '-' (1.14: )
 Entering state 13
 Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 131):
+Reducing stack by rule 18 (line 143):
    $1 = token '!' (1.13: )
    $2 = token '-' (1.14: )
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+  | error
+./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Stack now 0 4 5
+Reading a token
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Stack now 0 4 5 16
+Reducing stack by rule 16 (line 129):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Stack now 0 4
+Shifting token error (1.2-3: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.9: 1)
+Shifting token number (1.9: 1)
 Entering state 1
-Stack now 0 8 21 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token number (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.11: 2)
+Error: discarding token number (1.11: 2)
+Error: popping token error (1.9-11: )
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
 Entering state 30
 Stack now 0 8 21 30
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token '=' (1.14: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
 Entering state 1
-Stack now 0 8 21 30 22 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 120):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | 
+  | +1
+./calc.at:1358:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
 Entering state 5
-Stack now 0 8 21 5
+Stack now 0 4 5
 Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 144):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Stack now 0 4 5 16
+Reducing stack by rule 16 (line 129):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Stack now 0 4
+Shifting token error (1.2-3: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
 Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.9: 1)
+Shifting token number (1.9: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token number (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.11: 2)
+Error: discarding token number (1.11: 2)
+Error: popping token error (1.9-11: )
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 Starting parse
 Entering state 0
 Stack now 0
@@ -52709,7 +56381,7 @@
 Stack now 0 8 21 30 22 31
 Reading a token
 Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 120):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
@@ -52717,7 +56389,7 @@
 Entering state 30
 Stack now 0 8 21 30
 Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
@@ -52738,12 +56410,51 @@
 Shifting token '-' (1.14: )
 Entering state 13
 Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 131):
+Reducing stack by rule 18 (line 143):
    $1 = token '!' (1.13: )
    $2 = token '-' (1.14: )
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+stderr:
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1355: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -52754,7 +56465,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1357: "$PERL" -pi -e 'use strict;
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+./calc.at:1360: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -52764,138 +56483,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1355: cat stderr
 ./calc.at:1357: cat stderr
+./calc.at:1355: cat stderr
+./calc.at:1358: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1360: cat stderr
+input:
 input:
-  | (#) + (#) = 2222
-./calc.at:1355:  $PREPARSER ./calc  input
 input:
+  | 1 = 2 = 3
+./calc.at:1360:  $PREPARSER ./calc  input
   | 1 + 2 * 3 + !* ++
+./calc.at:1355:  $PREPARSER ./calc  input
+./calc.at:1358: cat stderr
+  | (- *) + (1 2) = 1
 ./calc.at:1357:  $PREPARSER ./calc  input
 stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.8: )
-Stack now 0 8 21 4
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -52941,7 +56552,7 @@
 Stack now 0 8 21 30 22 31
 Reading a token
 Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 120):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
@@ -52949,7 +56560,7 @@
 Entering state 30
 Stack now 0 8 21 30
 Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
@@ -52970,121 +56581,188 @@
 Shifting token '*' (1.14: )
 Entering state 15
 Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 132):
+Reducing stack by rule 19 (line 144):
    $1 = token '!' (1.13: )
    $2 = token '*' (1.14: )
 1.14: memory exhausted
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+./calc.at:1358:  $PREPARSER ./calc  /dev/null
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 128):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
 Stack now 0 4
-Shifting token error (1.2: )
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
+Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Stack now 0 8 21 4
 Reading a token
-1.8: syntax error: invalid character: '#'
-Shifting token error (1.8: )
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
 Entering state 11
 Stack now 0 8 21 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.8: )
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
 Stack now 0 8 21 4
-Shifting token error (1.8: )
+Shifting token error (1.10-12: )
 Entering state 11
 Stack now 0 8 21 4 11
 Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
 Entering state 30
 Stack now 0 8 21 30
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
+Next token is token '=' (1.15: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -53101,6 +56779,8 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -53146,7 +56826,7 @@
 Stack now 0 8 21 30 22 31
 Reading a token
 Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 120):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
@@ -53154,7 +56834,7 @@
 Entering state 30
 Stack now 0 8 21 30
 Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
@@ -53175,39 +56855,64 @@
 Shifting token '*' (1.14: )
 Entering state 15
 Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 132):
+Reducing stack by rule 19 (line 144):
    $1 = token '!' (1.13: )
    $2 = token '*' (1.14: )
 1.14: memory exhausted
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1355: cat stderr
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1357: cat stderr
-  | (1 + #) = 1111
-./calc.at:1355:  $PREPARSER ./calc  input
-input:
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
+Stack now 0
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -53218,79 +56923,129 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
 Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 128):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
 Stack now 0 4
-Shifting token error (1.2-6: )
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
 Stack now 0 4
-Shifting token error (1.2-6: )
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
+Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -53307,11 +57062,70 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (#) + (#) = 2222
-./calc.at:1357:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
+Stack now 0
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1357: cat stderr
+./calc.at:1360: cat stderr
+./calc.at:1355: cat stderr
+./calc.at:1358: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (#) + (#) = 2222
+./calc.at:1355:  $PREPARSER ./calc  input
+input:
 stderr:
+input:
+./calc.at:1358: cat stderr
+  | (* *) + (*) + (*)
+  | 
+  | +1
+./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1357:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -53337,7 +57151,7 @@
 Shifting token ')' (1.3: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
    $2 = token error (1.2: )
    $3 = token ')' (1.3: )
@@ -53371,7 +57185,7 @@
 Shifting token ')' (1.9: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.7: )
    $2 = token error (1.8: )
    $3 = token ')' (1.9: )
@@ -53380,7 +57194,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-3: 1111)
    $2 = token '+' (1.5: )
    $3 = nterm exp (1.7-9: 1111)
@@ -53433,7 +57247,36 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -53443,79 +57286,132 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
 Stack now 0 4
-Shifting token error (1.2-6: )
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2: )
 Stack now 0 4
-Shifting token error (1.2-6: )
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
+Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
+Stack now 0 8 21 4
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 21 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -53532,7 +57428,10 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1358:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -53558,7 +57457,7 @@
 Shifting token ')' (1.3: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
    $2 = token error (1.2: )
    $3 = token ')' (1.3: )
@@ -53592,7 +57491,7 @@
 Shifting token ')' (1.9: )
 Entering state 26
 Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.7: )
    $2 = token error (1.8: )
    $3 = token ')' (1.9: )
@@ -53601,7 +57500,7 @@
 Stack now 0 8 21 30
 Reading a token
 Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 118):
    $1 = nterm exp (1.1-3: 1111)
    $2 = token '+' (1.5: )
    $3 = nterm exp (1.7-9: 1111)
@@ -53654,30 +57553,8 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1355: cat stderr
-./calc.at:1357: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | (# + 1) = 1111
-./calc.at:1355:  $PREPARSER ./calc  input
+stderr:
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -53688,76 +57565,132 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-1.2: syntax error: invalid character: '#'
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
 Error: popping token error (1.2: )
 Stack now 0 4
 Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Error: popping token error (1.2: )
 Stack now 0 4
 Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
+Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
+Stack now 0 8 21 4
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 21 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -53774,9 +57707,31 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1357: cat stderr
-stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
 Starting parse
 Entering state 0
 Stack now 0
@@ -53785,290 +57740,326 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
+Return for a new token:
 Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
+Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
+Reading a token
+Next token is token number (1.7: 1)
+Shifting token number (1.7: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
+   $1 = token number (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Return for a new token:
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | (1 + #) = 1111
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1357:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token number (1.11: 1)
+Shifting token number (1.11: 1)
+Entering state 1
+Stack now 0 8 21 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
+Return for a new token:
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Return for a new token:
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token number (1.15: 1)
+Shifting token number (1.15: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 8 21 4 12 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
+   $1 = token number (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
+Return for a new token:
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token '+' (1.17: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
 Entering state 21
-Stack now 0 4 12 21
+Stack now 0 8 21 4 12 21
+Return for a new token:
 Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Stack now 0 8 21 4 12
+Error: popping nterm exp (1.7-15: 3)
+Stack now 0 8 21 4
+Shifting token error (1.7-18: )
 Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Stack now 0 8 21 4 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
 Entering state 26
-Stack now 0 4 11 26
+Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Error: popping token error (1.23: )
+Stack now 0 8 21 4
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 21 4 11
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Error: popping token error (1.23: )
+Stack now 0 8 21 4
+Shifting token error (1.23-25: )
+Entering state 11
+Stack now 0 8 21 4 11
+Return for a new token:
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Error: popping token error (1.23-25: )
+Stack now 0 8 21 4
+Shifting token error (1.23-27: )
+Entering state 11
+Stack now 0 8 21 4 11
+Return for a new token:
+Reading a token
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1355: cat stderr
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
 Entering state 4
-Stack now 0 4
+Stack now 0 8 21 4
+Return for a new token:
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token number (1.33: 1)
+Shifting token number (1.33: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token number (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
 Entering state 12
-Stack now 0 4 12
+Stack now 0 8 21 4 12
+Return for a new token:
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
+Stack now 0 8 21 4 12 22
+Return for a new token:
 Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
+Next token is token number (1.37: 2)
+Shifting token number (1.37: 2)
+Entering state 1
+Stack now 0 8 21 4 12 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
+Stack now 0 8 21 4 12 22 31
+Return for a new token:
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
+Stack now 0 8 21 4 12 22
+Return for a new token:
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Stack now 0 8 21 4 12
+Error: popping nterm exp (1.33-37: 2)
+Stack now 0 8 21 4
+Shifting token error (1.33-41: )
 Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
+Stack now 0 8 21 4 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Error: popping token error (1.33-41: )
+Stack now 0 8 21 4
+Shifting token error (1.33-41: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
+Return for a new token:
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
 Entering state 26
-Stack now 0 4 11 26
+Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
 Entering state 8
 Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
 Entering state 19
 Stack now 0 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.46: 1)
+Shifting token number (1.46: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
 Entering state 28
 Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.47-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -54077,6 +58068,7 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -54085,7 +58077,9 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: "$PERL" -pi -e 'use strict;
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1355: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -54095,11 +58089,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1355:  $PREPARSER ./calc  input
-./calc.at:1357: cat stderr
-stderr:
+stdout:
+./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1357: cat stderr
+./calc.at:1355: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -54108,311 +58112,326 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
+Return for a new token:
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
 Entering state 26
 Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
+Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
+Reading a token
+Next token is token number (1.7: 1)
+Shifting token number (1.7: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
+   $1 = token number (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Return for a new token:
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1357:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token number (1.11: 1)
+Shifting token number (1.11: 1)
+Entering state 1
+Stack now 0 8 21 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
+Return for a new token:
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Return for a new token:
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token number (1.15: 1)
+Shifting token number (1.15: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 8 21 4 12 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token number (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
+Return for a new token:
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
 Entering state 12
-Stack now 0 4 12
+Stack now 0 8 21 4 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Return for a new token:
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Stack now 0 8 21 4 12
+Error: popping nterm exp (1.7-15: 3)
+Stack now 0 8 21 4
+Shifting token error (1.7-18: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
 Entering state 21
-Stack now 0 4 12 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
 Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
+Stack now 0 8 21 4 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Error: popping token error (1.23: )
+Stack now 0 8 21 4
+Shifting token error (1.23: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
+Return for a new token:
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Error: popping token error (1.23: )
+Stack now 0 8 21 4
+Shifting token error (1.23-25: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
+Return for a new token:
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Error: popping token error (1.23-25: )
+Stack now 0 8 21 4
+Shifting token error (1.23-27: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
+Return for a new token:
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
 Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 139):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
+Next token is token number (1.33: 1)
+Shifting token number (1.33: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
+   $1 = token number (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Return for a new token:
 Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
+Stack now 0 8 21 4 12 22
+Return for a new token:
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token number (1.37: 2)
+Shifting token number (1.37: 2)
+Entering state 1
+Stack now 0 8 21 4 12 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
+Stack now 0 8 21 4 12 22 31
+Return for a new token:
 Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '*' (1.39: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Stack now 0 8 21 4 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
+Stack now 0 8 21 4 12 22
+Return for a new token:
 Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Stack now 0 8 21 4 12
+Error: popping nterm exp (1.33-37: 2)
+Stack now 0 8 21 4
+Shifting token error (1.33-41: )
 Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
+Stack now 0 8 21 4 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Error: popping token error (1.33-41: )
+Stack now 0 8 21 4
+Shifting token error (1.33-41: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 21 4 11
+Return for a new token:
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
 Entering state 26
-Stack now 0 4 11 26
+Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
 Entering state 8
 Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
 Entering state 19
 Stack now 0 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.46: 1)
+Shifting token number (1.46: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
 Entering state 28
 Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.47-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -54421,6 +58440,7 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -54429,12 +58449,7 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
-stderr:
-./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1355: "$PERL" -pi -e 'use strict;
+./calc.at:1360: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -54444,103 +58459,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1355: cat stderr
-./calc.at:1358: "$PERL" -ne '
+./calc.at:1362: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -54551,7 +58470,11 @@
         || /\t/
         )' calc.c calc.h
 
-./calc.at:1357: "$PERL" -pi -e 'use strict;
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1357:  $PREPARSER ./calc  input
+./calc.at:1360: cat stderr
+./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -54561,10 +58484,91 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 130):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
 input:
-./calc.at:1357: cat stderr
-  | (1 + 1) / (1 - 1)
+  | (1 + #) = 1111
 ./calc.at:1355:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -54579,7 +58583,92 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1362:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1358: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 130):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1360:  $PREPARSER ./calc  /dev/null
 stderr:
 Starting parse
 Entering state 0
@@ -54605,107 +58694,64 @@
 Entering state 21
 Stack now 0 4 12 21
 Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
 Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
 Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 138):
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
+   $2 = token error (1.2-6: )
    $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 8 23 4 12 20 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 119):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 138):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 121):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -54724,10 +58770,6 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
-stderr:
-  | (1 + # + 1) = 1111
-./calc.at:1357:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -54741,13 +58783,11 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
 Next token is token '+' (1.3: )
 Shifting token '+' (1.3: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token number (1.5: 2)
 Shifting token number (1.5: 2)
@@ -54758,13 +58798,11 @@
 -> $$ = nterm exp (1.5: 2)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '*' (1.7: )
 Shifting token '*' (1.7: )
 Entering state 22
 Stack now 0 8 21 30 22
-Return for a new token:
 Reading a token
 Next token is token number (1.9: 3)
 Shifting token number (1.9: 3)
@@ -54775,7 +58813,6 @@
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Stack now 0 8 21 30 22 31
-Return for a new token:
 Reading a token
 Next token is token '=' (1.11: )
 Reducing stack by rule 9 (line 114):
@@ -54797,7 +58834,6 @@
 Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
-Return for a new token:
 Reading a token
 Next token is token number (1.13: 7)
 Shifting token number (1.13: 7)
@@ -54808,7 +58844,6 @@
 -> $$ = nterm exp (1.13: 7)
 Entering state 28
 Stack now 0 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (1.14-2.0: )
 Reducing stack by rule 6 (line 102):
@@ -54833,7 +58868,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token number (2.1: 1)
 Shifting token number (2.1: 1)
@@ -54844,13 +58878,11 @@
 -> $$ = nterm exp (2.1: 1)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '+' (2.3: )
 Shifting token '+' (2.3: )
 Entering state 21
 Stack now 0 6 8 21
-Return for a new token:
 Reading a token
 Next token is token number (2.5: 2)
 Shifting token number (2.5: 2)
@@ -54861,19 +58893,16 @@
 -> $$ = nterm exp (2.5: 2)
 Entering state 30
 Stack now 0 6 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '*' (2.7: )
 Shifting token '*' (2.7: )
 Entering state 22
 Stack now 0 6 8 21 30 22
-Return for a new token:
 Reading a token
 Next token is token '-' (2.9: )
 Shifting token '-' (2.9: )
 Entering state 2
 Stack now 0 6 8 21 30 22 2
-Return for a new token:
 Reading a token
 Next token is token number (2.10: 3)
 Shifting token number (2.10: 3)
@@ -54884,7 +58913,6 @@
 -> $$ = nterm exp (2.10: 3)
 Entering state 10
 Stack now 0 6 8 21 30 22 2 10
-Return for a new token:
 Reading a token
 Next token is token '=' (2.12: )
 Reducing stack by rule 11 (line 124):
@@ -54913,13 +58941,11 @@
 Shifting token '=' (2.12: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token '-' (2.14: )
 Shifting token '-' (2.14: )
 Entering state 2
 Stack now 0 6 8 19 2
-Return for a new token:
 Reading a token
 Next token is token number (2.15: 5)
 Shifting token number (2.15: 5)
@@ -54930,7 +58956,6 @@
 -> $$ = nterm exp (2.15: 5)
 Entering state 10
 Stack now 0 6 8 19 2 10
-Return for a new token:
 Reading a token
 Next token is token '\n' (2.16-3.0: )
 Reducing stack by rule 11 (line 124):
@@ -54963,7 +58988,6 @@
 -> $$ = nterm input (1.1-3.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '\n' (3.1-4.0: )
 Shifting token '\n' (3.1-4.0: )
@@ -54980,13 +59004,11 @@
 -> $$ = nterm input (1.1-4.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '-' (4.1: )
 Shifting token '-' (4.1: )
 Entering state 2
 Stack now 0 6 2
-Return for a new token:
 Reading a token
 Next token is token number (4.2: 1)
 Shifting token number (4.2: 1)
@@ -54997,13 +59019,11 @@
 -> $$ = nterm exp (4.2: 1)
 Entering state 10
 Stack now 0 6 2 10
-Return for a new token:
 Reading a token
 Next token is token '^' (4.3: )
 Shifting token '^' (4.3: )
 Entering state 24
 Stack now 0 6 2 10 24
-Return for a new token:
 Reading a token
 Next token is token number (4.4: 2)
 Shifting token number (4.4: 2)
@@ -55014,7 +59034,6 @@
 -> $$ = nterm exp (4.4: 2)
 Entering state 33
 Stack now 0 6 2 10 24 33
-Return for a new token:
 Reading a token
 Next token is token '=' (4.6: )
 Reducing stack by rule 12 (line 125):
@@ -55035,13 +59054,11 @@
 Shifting token '=' (4.6: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token '-' (4.8: )
 Shifting token '-' (4.8: )
 Entering state 2
 Stack now 0 6 8 19 2
-Return for a new token:
 Reading a token
 Next token is token number (4.9: 1)
 Shifting token number (4.9: 1)
@@ -55052,7 +59069,6 @@
 -> $$ = nterm exp (4.9: 1)
 Entering state 10
 Stack now 0 6 8 19 2 10
-Return for a new token:
 Reading a token
 Next token is token '\n' (4.10-5.0: )
 Reducing stack by rule 11 (line 124):
@@ -55085,19 +59101,16 @@
 -> $$ = nterm input (1.1-5.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '(' (5.1: )
 Shifting token '(' (5.1: )
 Entering state 4
 Stack now 0 6 4
-Return for a new token:
 Reading a token
 Next token is token '-' (5.2: )
 Shifting token '-' (5.2: )
 Entering state 2
 Stack now 0 6 4 2
-Return for a new token:
 Reading a token
 Next token is token number (5.3: 1)
 Shifting token number (5.3: 1)
@@ -55108,7 +59121,6 @@
 -> $$ = nterm exp (5.3: 1)
 Entering state 10
 Stack now 0 6 4 2 10
-Return for a new token:
 Reading a token
 Next token is token ')' (5.4: )
 Reducing stack by rule 11 (line 124):
@@ -55128,13 +59140,11 @@
 -> $$ = nterm exp (5.1-4: -1)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '^' (5.5: )
 Shifting token '^' (5.5: )
 Entering state 24
 Stack now 0 6 8 24
-Return for a new token:
 Reading a token
 Next token is token number (5.6: 2)
 Shifting token number (5.6: 2)
@@ -55145,7 +59155,6 @@
 -> $$ = nterm exp (5.6: 2)
 Entering state 33
 Stack now 0 6 8 24 33
-Return for a new token:
 Reading a token
 Next token is token '=' (5.8: )
 Reducing stack by rule 12 (line 125):
@@ -55159,7 +59168,6 @@
 Shifting token '=' (5.8: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token number (5.10: 1)
 Shifting token number (5.10: 1)
@@ -55170,7 +59178,6 @@
 -> $$ = nterm exp (5.10: 1)
 Entering state 28
 Stack now 0 6 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (5.11-6.0: )
 Reducing stack by rule 6 (line 102):
@@ -55196,7 +59203,6 @@
 -> $$ = nterm input (1.1-6.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '\n' (6.1-7.0: )
 Shifting token '\n' (6.1-7.0: )
@@ -55213,25 +59219,21 @@
 -> $$ = nterm input (1.1-7.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '-' (7.1: )
 Shifting token '-' (7.1: )
 Entering state 2
 Stack now 0 6 2
-Return for a new token:
 Reading a token
 Next token is token '-' (7.2: )
 Shifting token '-' (7.2: )
 Entering state 2
 Stack now 0 6 2 2
-Return for a new token:
 Reading a token
 Next token is token '-' (7.3: )
 Shifting token '-' (7.3: )
 Entering state 2
 Stack now 0 6 2 2 2
-Return for a new token:
 Reading a token
 Next token is token number (7.4: 1)
 Shifting token number (7.4: 1)
@@ -55242,7 +59244,6 @@
 -> $$ = nterm exp (7.4: 1)
 Entering state 10
 Stack now 0 6 2 2 2 10
-Return for a new token:
 Reading a token
 Next token is token '=' (7.6: )
 Reducing stack by rule 11 (line 124):
@@ -55269,13 +59270,11 @@
 Shifting token '=' (7.6: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token '-' (7.8: )
 Shifting token '-' (7.8: )
 Entering state 2
 Stack now 0 6 8 19 2
-Return for a new token:
 Reading a token
 Next token is token number (7.9: 1)
 Shifting token number (7.9: 1)
@@ -55286,7 +59285,6 @@
 -> $$ = nterm exp (7.9: 1)
 Entering state 10
 Stack now 0 6 8 19 2 10
-Return for a new token:
 Reading a token
 Next token is token '\n' (7.10-8.0: )
 Reducing stack by rule 11 (line 124):
@@ -55319,7 +59317,6 @@
 -> $$ = nterm input (1.1-8.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '\n' (8.1-9.0: )
 Shifting token '\n' (8.1-9.0: )
@@ -55336,7 +59333,6 @@
 -> $$ = nterm input (1.1-9.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token number (9.1: 1)
 Shifting token number (9.1: 1)
@@ -55347,13 +59343,11 @@
 -> $$ = nterm exp (9.1: 1)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '-' (9.3: )
 Shifting token '-' (9.3: )
 Entering state 20
 Stack now 0 6 8 20
-Return for a new token:
 Reading a token
 Next token is token number (9.5: 2)
 Shifting token number (9.5: 2)
@@ -55364,7 +59358,6 @@
 -> $$ = nterm exp (9.5: 2)
 Entering state 29
 Stack now 0 6 8 20 29
-Return for a new token:
 Reading a token
 Next token is token '-' (9.7: )
 Reducing stack by rule 8 (line 113):
@@ -55378,7 +59371,6 @@
 Shifting token '-' (9.7: )
 Entering state 20
 Stack now 0 6 8 20
-Return for a new token:
 Reading a token
 Next token is token number (9.9: 3)
 Shifting token number (9.9: 3)
@@ -55389,7 +59381,6 @@
 -> $$ = nterm exp (9.9: 3)
 Entering state 29
 Stack now 0 6 8 20 29
-Return for a new token:
 Reading a token
 Next token is token '=' (9.11: )
 Reducing stack by rule 8 (line 113):
@@ -55403,13 +59394,11 @@
 Shifting token '=' (9.11: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token '-' (9.13: )
 Shifting token '-' (9.13: )
 Entering state 2
 Stack now 0 6 8 19 2
-Return for a new token:
 Reading a token
 Next token is token number (9.14: 4)
 Shifting token number (9.14: 4)
@@ -55420,7 +59409,6 @@
 -> $$ = nterm exp (9.14: 4)
 Entering state 10
 Stack now 0 6 8 19 2 10
-Return for a new token:
 Reading a token
 Next token is token '\n' (9.15-10.0: )
 Reducing stack by rule 11 (line 124):
@@ -55453,7 +59441,6 @@
 -> $$ = nterm input (1.1-10.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token number (10.1: 1)
 Shifting token number (10.1: 1)
@@ -55464,19 +59451,16 @@
 -> $$ = nterm exp (10.1: 1)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '-' (10.3: )
 Shifting token '-' (10.3: )
 Entering state 20
 Stack now 0 6 8 20
-Return for a new token:
 Reading a token
 Next token is token '(' (10.5: )
 Shifting token '(' (10.5: )
 Entering state 4
 Stack now 0 6 8 20 4
-Return for a new token:
 Reading a token
 Next token is token number (10.6: 2)
 Shifting token number (10.6: 2)
@@ -55487,13 +59471,11 @@
 -> $$ = nterm exp (10.6: 2)
 Entering state 12
 Stack now 0 6 8 20 4 12
-Return for a new token:
 Reading a token
 Next token is token '-' (10.8: )
 Shifting token '-' (10.8: )
 Entering state 20
 Stack now 0 6 8 20 4 12 20
-Return for a new token:
 Reading a token
 Next token is token number (10.10: 3)
 Shifting token number (10.10: 3)
@@ -55504,7 +59486,6 @@
 -> $$ = nterm exp (10.10: 3)
 Entering state 29
 Stack now 0 6 8 20 4 12 20 29
-Return for a new token:
 Reading a token
 Next token is token ')' (10.11: )
 Reducing stack by rule 8 (line 113):
@@ -55525,7 +59506,6 @@
 -> $$ = nterm exp (10.5-11: -1)
 Entering state 29
 Stack now 0 6 8 20 29
-Return for a new token:
 Reading a token
 Next token is token '=' (10.13: )
 Reducing stack by rule 8 (line 113):
@@ -55539,7 +59519,6 @@
 Shifting token '=' (10.13: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token number (10.15: 2)
 Shifting token number (10.15: 2)
@@ -55550,7 +59529,6 @@
 -> $$ = nterm exp (10.15: 2)
 Entering state 28
 Stack now 0 6 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (10.16-11.0: )
 Reducing stack by rule 6 (line 102):
@@ -55576,7 +59554,6 @@
 -> $$ = nterm input (1.1-11.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '\n' (11.1-12.0: )
 Shifting token '\n' (11.1-12.0: )
@@ -55593,7 +59570,6 @@
 -> $$ = nterm input (1.1-12.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token number (12.1: 2)
 Shifting token number (12.1: 2)
@@ -55604,13 +59580,11 @@
 -> $$ = nterm exp (12.1: 2)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '^' (12.2: )
 Shifting token '^' (12.2: )
 Entering state 24
 Stack now 0 6 8 24
-Return for a new token:
 Reading a token
 Next token is token number (12.3: 2)
 Shifting token number (12.3: 2)
@@ -55621,13 +59595,11 @@
 -> $$ = nterm exp (12.3: 2)
 Entering state 33
 Stack now 0 6 8 24 33
-Return for a new token:
 Reading a token
 Next token is token '^' (12.4: )
 Shifting token '^' (12.4: )
 Entering state 24
 Stack now 0 6 8 24 33 24
-Return for a new token:
 Reading a token
 Next token is token number (12.5: 3)
 Shifting token number (12.5: 3)
@@ -55638,7 +59610,6 @@
 -> $$ = nterm exp (12.5: 3)
 Entering state 33
 Stack now 0 6 8 24 33 24 33
-Return for a new token:
 Reading a token
 Next token is token '=' (12.7: )
 Reducing stack by rule 12 (line 125):
@@ -55660,7 +59631,6 @@
 Shifting token '=' (12.7: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token number (12.9-11: 256)
 Shifting token number (12.9-11: 256)
@@ -55671,7 +59641,6 @@
 -> $$ = nterm exp (12.9-11: 256)
 Entering state 28
 Stack now 0 6 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (12.12-13.0: )
 Reducing stack by rule 6 (line 102):
@@ -55697,13 +59666,11 @@
 -> $$ = nterm input (1.1-13.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '(' (13.1: )
 Shifting token '(' (13.1: )
 Entering state 4
 Stack now 0 6 4
-Return for a new token:
 Reading a token
 Next token is token number (13.2: 2)
 Shifting token number (13.2: 2)
@@ -55714,13 +59681,11 @@
 -> $$ = nterm exp (13.2: 2)
 Entering state 12
 Stack now 0 6 4 12
-Return for a new token:
 Reading a token
 Next token is token '^' (13.3: )
 Shifting token '^' (13.3: )
 Entering state 24
 Stack now 0 6 4 12 24
-Return for a new token:
 Reading a token
 Next token is token number (13.4: 2)
 Shifting token number (13.4: 2)
@@ -55731,7 +59696,6 @@
 -> $$ = nterm exp (13.4: 2)
 Entering state 33
 Stack now 0 6 4 12 24 33
-Return for a new token:
 Reading a token
 Next token is token ')' (13.5: )
 Reducing stack by rule 12 (line 125):
@@ -55752,13 +59716,11 @@
 -> $$ = nterm exp (13.1-5: 4)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '^' (13.6: )
 Shifting token '^' (13.6: )
 Entering state 24
 Stack now 0 6 8 24
-Return for a new token:
 Reading a token
 Next token is token number (13.7: 3)
 Shifting token number (13.7: 3)
@@ -55769,7 +59731,6 @@
 -> $$ = nterm exp (13.7: 3)
 Entering state 33
 Stack now 0 6 8 24 33
-Return for a new token:
 Reading a token
 Next token is token '=' (13.9: )
 Reducing stack by rule 12 (line 125):
@@ -55783,7 +59744,6 @@
 Shifting token '=' (13.9: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token number (13.11-12: 64)
 Shifting token number (13.11-12: 64)
@@ -55794,7 +59754,6 @@
 -> $$ = nterm exp (13.11-12: 64)
 Entering state 28
 Stack now 0 6 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (13.13-14.0: )
 Reducing stack by rule 6 (line 102):
@@ -55820,7 +59779,6 @@
 -> $$ = nterm input (1.1-14.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (14.1: )
@@ -55829,7 +59787,23 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
+Stack now 0
+input:
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1357:  $PREPARSER ./calc  input
+  | (!!) + (1 2) = 1
+./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -55854,107 +59828,64 @@
 Entering state 21
 Stack now 0 4 12 21
 Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 118):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
 Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
 Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 138):
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
+   $2 = token error (1.2-6: )
    $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 8 23 4 12 20 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 119):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 138):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 121):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -55973,6 +59904,7 @@
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -55981,96 +59913,133 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
+Return for a new token:
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Stack now 0 4 5
+Return for a new token:
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Stack now 0 4 5 16
+Reducing stack by rule 16 (line 129):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
 Stack now 0 4
-Shifting token error (1.2-10: )
+Shifting token error (1.2-3: )
 Entering state 11
 Stack now 0 4 11
+Return for a new token:
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
+Reading a token
+Next token is token number (1.9: 1)
+Shifting token number (1.9: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Return for a new token:
+Reading a token
+Next token is token number (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.11: 2)
+Error: discarding token number (1.11: 2)
+Error: popping token error (1.9-11: )
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Return for a new token:
+Reading a token
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
 Entering state 19
 Stack now 0 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
 Entering state 28
 Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -56079,6 +60048,7 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -56087,17 +60057,16 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1355: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
+Stack now 0
 Starting parse
 Entering state 0
 Stack now 0
@@ -56111,13 +60080,11 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
 Next token is token '+' (1.3: )
 Shifting token '+' (1.3: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token number (1.5: 2)
 Shifting token number (1.5: 2)
@@ -56128,13 +60095,11 @@
 -> $$ = nterm exp (1.5: 2)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '*' (1.7: )
 Shifting token '*' (1.7: )
 Entering state 22
 Stack now 0 8 21 30 22
-Return for a new token:
 Reading a token
 Next token is token number (1.9: 3)
 Shifting token number (1.9: 3)
@@ -56145,7 +60110,6 @@
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Stack now 0 8 21 30 22 31
-Return for a new token:
 Reading a token
 Next token is token '=' (1.11: )
 Reducing stack by rule 9 (line 114):
@@ -56167,7 +60131,6 @@
 Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
-Return for a new token:
 Reading a token
 Next token is token number (1.13: 7)
 Shifting token number (1.13: 7)
@@ -56178,7 +60141,6 @@
 -> $$ = nterm exp (1.13: 7)
 Entering state 28
 Stack now 0 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (1.14-2.0: )
 Reducing stack by rule 6 (line 102):
@@ -56203,7 +60165,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token number (2.1: 1)
 Shifting token number (2.1: 1)
@@ -56214,13 +60175,11 @@
 -> $$ = nterm exp (2.1: 1)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '+' (2.3: )
 Shifting token '+' (2.3: )
 Entering state 21
 Stack now 0 6 8 21
-Return for a new token:
 Reading a token
 Next token is token number (2.5: 2)
 Shifting token number (2.5: 2)
@@ -56231,19 +60190,16 @@
 -> $$ = nterm exp (2.5: 2)
 Entering state 30
 Stack now 0 6 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '*' (2.7: )
 Shifting token '*' (2.7: )
 Entering state 22
 Stack now 0 6 8 21 30 22
-Return for a new token:
 Reading a token
 Next token is token '-' (2.9: )
 Shifting token '-' (2.9: )
 Entering state 2
 Stack now 0 6 8 21 30 22 2
-Return for a new token:
 Reading a token
 Next token is token number (2.10: 3)
 Shifting token number (2.10: 3)
@@ -56254,7 +60210,6 @@
 -> $$ = nterm exp (2.10: 3)
 Entering state 10
 Stack now 0 6 8 21 30 22 2 10
-Return for a new token:
 Reading a token
 Next token is token '=' (2.12: )
 Reducing stack by rule 11 (line 124):
@@ -56283,13 +60238,11 @@
 Shifting token '=' (2.12: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token '-' (2.14: )
 Shifting token '-' (2.14: )
 Entering state 2
 Stack now 0 6 8 19 2
-Return for a new token:
 Reading a token
 Next token is token number (2.15: 5)
 Shifting token number (2.15: 5)
@@ -56300,7 +60253,6 @@
 -> $$ = nterm exp (2.15: 5)
 Entering state 10
 Stack now 0 6 8 19 2 10
-Return for a new token:
 Reading a token
 Next token is token '\n' (2.16-3.0: )
 Reducing stack by rule 11 (line 124):
@@ -56333,7 +60285,6 @@
 -> $$ = nterm input (1.1-3.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '\n' (3.1-4.0: )
 Shifting token '\n' (3.1-4.0: )
@@ -56350,13 +60301,11 @@
 -> $$ = nterm input (1.1-4.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '-' (4.1: )
 Shifting token '-' (4.1: )
 Entering state 2
 Stack now 0 6 2
-Return for a new token:
 Reading a token
 Next token is token number (4.2: 1)
 Shifting token number (4.2: 1)
@@ -56367,13 +60316,11 @@
 -> $$ = nterm exp (4.2: 1)
 Entering state 10
 Stack now 0 6 2 10
-Return for a new token:
 Reading a token
 Next token is token '^' (4.3: )
 Shifting token '^' (4.3: )
 Entering state 24
 Stack now 0 6 2 10 24
-Return for a new token:
 Reading a token
 Next token is token number (4.4: 2)
 Shifting token number (4.4: 2)
@@ -56384,7 +60331,6 @@
 -> $$ = nterm exp (4.4: 2)
 Entering state 33
 Stack now 0 6 2 10 24 33
-Return for a new token:
 Reading a token
 Next token is token '=' (4.6: )
 Reducing stack by rule 12 (line 125):
@@ -56405,13 +60351,11 @@
 Shifting token '=' (4.6: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token '-' (4.8: )
 Shifting token '-' (4.8: )
 Entering state 2
 Stack now 0 6 8 19 2
-Return for a new token:
 Reading a token
 Next token is token number (4.9: 1)
 Shifting token number (4.9: 1)
@@ -56422,7 +60366,6 @@
 -> $$ = nterm exp (4.9: 1)
 Entering state 10
 Stack now 0 6 8 19 2 10
-Return for a new token:
 Reading a token
 Next token is token '\n' (4.10-5.0: )
 Reducing stack by rule 11 (line 124):
@@ -56455,19 +60398,16 @@
 -> $$ = nterm input (1.1-5.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '(' (5.1: )
 Shifting token '(' (5.1: )
 Entering state 4
 Stack now 0 6 4
-Return for a new token:
 Reading a token
 Next token is token '-' (5.2: )
 Shifting token '-' (5.2: )
 Entering state 2
 Stack now 0 6 4 2
-Return for a new token:
 Reading a token
 Next token is token number (5.3: 1)
 Shifting token number (5.3: 1)
@@ -56478,7 +60418,6 @@
 -> $$ = nterm exp (5.3: 1)
 Entering state 10
 Stack now 0 6 4 2 10
-Return for a new token:
 Reading a token
 Next token is token ')' (5.4: )
 Reducing stack by rule 11 (line 124):
@@ -56498,13 +60437,11 @@
 -> $$ = nterm exp (5.1-4: -1)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '^' (5.5: )
 Shifting token '^' (5.5: )
 Entering state 24
 Stack now 0 6 8 24
-Return for a new token:
 Reading a token
 Next token is token number (5.6: 2)
 Shifting token number (5.6: 2)
@@ -56515,7 +60452,6 @@
 -> $$ = nterm exp (5.6: 2)
 Entering state 33
 Stack now 0 6 8 24 33
-Return for a new token:
 Reading a token
 Next token is token '=' (5.8: )
 Reducing stack by rule 12 (line 125):
@@ -56529,7 +60465,6 @@
 Shifting token '=' (5.8: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token number (5.10: 1)
 Shifting token number (5.10: 1)
@@ -56540,7 +60475,6 @@
 -> $$ = nterm exp (5.10: 1)
 Entering state 28
 Stack now 0 6 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (5.11-6.0: )
 Reducing stack by rule 6 (line 102):
@@ -56566,7 +60500,6 @@
 -> $$ = nterm input (1.1-6.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '\n' (6.1-7.0: )
 Shifting token '\n' (6.1-7.0: )
@@ -56583,25 +60516,21 @@
 -> $$ = nterm input (1.1-7.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '-' (7.1: )
 Shifting token '-' (7.1: )
 Entering state 2
 Stack now 0 6 2
-Return for a new token:
 Reading a token
 Next token is token '-' (7.2: )
 Shifting token '-' (7.2: )
 Entering state 2
 Stack now 0 6 2 2
-Return for a new token:
 Reading a token
 Next token is token '-' (7.3: )
 Shifting token '-' (7.3: )
 Entering state 2
 Stack now 0 6 2 2 2
-Return for a new token:
 Reading a token
 Next token is token number (7.4: 1)
 Shifting token number (7.4: 1)
@@ -56612,7 +60541,6 @@
 -> $$ = nterm exp (7.4: 1)
 Entering state 10
 Stack now 0 6 2 2 2 10
-Return for a new token:
 Reading a token
 Next token is token '=' (7.6: )
 Reducing stack by rule 11 (line 124):
@@ -56639,13 +60567,11 @@
 Shifting token '=' (7.6: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token '-' (7.8: )
 Shifting token '-' (7.8: )
 Entering state 2
 Stack now 0 6 8 19 2
-Return for a new token:
 Reading a token
 Next token is token number (7.9: 1)
 Shifting token number (7.9: 1)
@@ -56656,7 +60582,6 @@
 -> $$ = nterm exp (7.9: 1)
 Entering state 10
 Stack now 0 6 8 19 2 10
-Return for a new token:
 Reading a token
 Next token is token '\n' (7.10-8.0: )
 Reducing stack by rule 11 (line 124):
@@ -56689,7 +60614,6 @@
 -> $$ = nterm input (1.1-8.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '\n' (8.1-9.0: )
 Shifting token '\n' (8.1-9.0: )
@@ -56706,7 +60630,6 @@
 -> $$ = nterm input (1.1-9.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token number (9.1: 1)
 Shifting token number (9.1: 1)
@@ -56717,13 +60640,11 @@
 -> $$ = nterm exp (9.1: 1)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '-' (9.3: )
 Shifting token '-' (9.3: )
 Entering state 20
 Stack now 0 6 8 20
-Return for a new token:
 Reading a token
 Next token is token number (9.5: 2)
 Shifting token number (9.5: 2)
@@ -56734,7 +60655,6 @@
 -> $$ = nterm exp (9.5: 2)
 Entering state 29
 Stack now 0 6 8 20 29
-Return for a new token:
 Reading a token
 Next token is token '-' (9.7: )
 Reducing stack by rule 8 (line 113):
@@ -56748,7 +60668,6 @@
 Shifting token '-' (9.7: )
 Entering state 20
 Stack now 0 6 8 20
-Return for a new token:
 Reading a token
 Next token is token number (9.9: 3)
 Shifting token number (9.9: 3)
@@ -56759,7 +60678,6 @@
 -> $$ = nterm exp (9.9: 3)
 Entering state 29
 Stack now 0 6 8 20 29
-Return for a new token:
 Reading a token
 Next token is token '=' (9.11: )
 Reducing stack by rule 8 (line 113):
@@ -56773,13 +60691,11 @@
 Shifting token '=' (9.11: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token '-' (9.13: )
 Shifting token '-' (9.13: )
 Entering state 2
 Stack now 0 6 8 19 2
-Return for a new token:
 Reading a token
 Next token is token number (9.14: 4)
 Shifting token number (9.14: 4)
@@ -56790,7 +60706,6 @@
 -> $$ = nterm exp (9.14: 4)
 Entering state 10
 Stack now 0 6 8 19 2 10
-Return for a new token:
 Reading a token
 Next token is token '\n' (9.15-10.0: )
 Reducing stack by rule 11 (line 124):
@@ -56823,7 +60738,6 @@
 -> $$ = nterm input (1.1-10.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token number (10.1: 1)
 Shifting token number (10.1: 1)
@@ -56834,19 +60748,16 @@
 -> $$ = nterm exp (10.1: 1)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '-' (10.3: )
 Shifting token '-' (10.3: )
 Entering state 20
 Stack now 0 6 8 20
-Return for a new token:
 Reading a token
 Next token is token '(' (10.5: )
 Shifting token '(' (10.5: )
 Entering state 4
 Stack now 0 6 8 20 4
-Return for a new token:
 Reading a token
 Next token is token number (10.6: 2)
 Shifting token number (10.6: 2)
@@ -56857,13 +60768,11 @@
 -> $$ = nterm exp (10.6: 2)
 Entering state 12
 Stack now 0 6 8 20 4 12
-Return for a new token:
 Reading a token
 Next token is token '-' (10.8: )
 Shifting token '-' (10.8: )
 Entering state 20
 Stack now 0 6 8 20 4 12 20
-Return for a new token:
 Reading a token
 Next token is token number (10.10: 3)
 Shifting token number (10.10: 3)
@@ -56874,7 +60783,6 @@
 -> $$ = nterm exp (10.10: 3)
 Entering state 29
 Stack now 0 6 8 20 4 12 20 29
-Return for a new token:
 Reading a token
 Next token is token ')' (10.11: )
 Reducing stack by rule 8 (line 113):
@@ -56895,7 +60803,6 @@
 -> $$ = nterm exp (10.5-11: -1)
 Entering state 29
 Stack now 0 6 8 20 29
-Return for a new token:
 Reading a token
 Next token is token '=' (10.13: )
 Reducing stack by rule 8 (line 113):
@@ -56909,7 +60816,6 @@
 Shifting token '=' (10.13: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token number (10.15: 2)
 Shifting token number (10.15: 2)
@@ -56920,7 +60826,6 @@
 -> $$ = nterm exp (10.15: 2)
 Entering state 28
 Stack now 0 6 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (10.16-11.0: )
 Reducing stack by rule 6 (line 102):
@@ -56946,7 +60851,6 @@
 -> $$ = nterm input (1.1-11.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '\n' (11.1-12.0: )
 Shifting token '\n' (11.1-12.0: )
@@ -56963,7 +60867,6 @@
 -> $$ = nterm input (1.1-12.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token number (12.1: 2)
 Shifting token number (12.1: 2)
@@ -56974,13 +60877,11 @@
 -> $$ = nterm exp (12.1: 2)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '^' (12.2: )
 Shifting token '^' (12.2: )
 Entering state 24
 Stack now 0 6 8 24
-Return for a new token:
 Reading a token
 Next token is token number (12.3: 2)
 Shifting token number (12.3: 2)
@@ -56991,13 +60892,11 @@
 -> $$ = nterm exp (12.3: 2)
 Entering state 33
 Stack now 0 6 8 24 33
-Return for a new token:
 Reading a token
 Next token is token '^' (12.4: )
 Shifting token '^' (12.4: )
 Entering state 24
 Stack now 0 6 8 24 33 24
-Return for a new token:
 Reading a token
 Next token is token number (12.5: 3)
 Shifting token number (12.5: 3)
@@ -57008,7 +60907,6 @@
 -> $$ = nterm exp (12.5: 3)
 Entering state 33
 Stack now 0 6 8 24 33 24 33
-Return for a new token:
 Reading a token
 Next token is token '=' (12.7: )
 Reducing stack by rule 12 (line 125):
@@ -57030,7 +60928,6 @@
 Shifting token '=' (12.7: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token number (12.9-11: 256)
 Shifting token number (12.9-11: 256)
@@ -57041,7 +60938,6 @@
 -> $$ = nterm exp (12.9-11: 256)
 Entering state 28
 Stack now 0 6 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (12.12-13.0: )
 Reducing stack by rule 6 (line 102):
@@ -57067,13 +60963,11 @@
 -> $$ = nterm input (1.1-13.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Next token is token '(' (13.1: )
 Shifting token '(' (13.1: )
 Entering state 4
 Stack now 0 6 4
-Return for a new token:
 Reading a token
 Next token is token number (13.2: 2)
 Shifting token number (13.2: 2)
@@ -57084,13 +60978,11 @@
 -> $$ = nterm exp (13.2: 2)
 Entering state 12
 Stack now 0 6 4 12
-Return for a new token:
 Reading a token
 Next token is token '^' (13.3: )
 Shifting token '^' (13.3: )
 Entering state 24
 Stack now 0 6 4 12 24
-Return for a new token:
 Reading a token
 Next token is token number (13.4: 2)
 Shifting token number (13.4: 2)
@@ -57101,7 +60993,6 @@
 -> $$ = nterm exp (13.4: 2)
 Entering state 33
 Stack now 0 6 4 12 24 33
-Return for a new token:
 Reading a token
 Next token is token ')' (13.5: )
 Reducing stack by rule 12 (line 125):
@@ -57122,13 +61013,11 @@
 -> $$ = nterm exp (13.1-5: 4)
 Entering state 8
 Stack now 0 6 8
-Return for a new token:
 Reading a token
 Next token is token '^' (13.6: )
 Shifting token '^' (13.6: )
 Entering state 24
 Stack now 0 6 8 24
-Return for a new token:
 Reading a token
 Next token is token number (13.7: 3)
 Shifting token number (13.7: 3)
@@ -57139,7 +61028,6 @@
 -> $$ = nterm exp (13.7: 3)
 Entering state 33
 Stack now 0 6 8 24 33
-Return for a new token:
 Reading a token
 Next token is token '=' (13.9: )
 Reducing stack by rule 12 (line 125):
@@ -57153,7 +61041,6 @@
 Shifting token '=' (13.9: )
 Entering state 19
 Stack now 0 6 8 19
-Return for a new token:
 Reading a token
 Next token is token number (13.11-12: 64)
 Shifting token number (13.11-12: 64)
@@ -57164,7 +61051,6 @@
 -> $$ = nterm exp (13.11-12: 64)
 Entering state 28
 Stack now 0 6 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (13.13-14.0: )
 Reducing stack by rule 6 (line 102):
@@ -57190,7 +61076,6 @@
 -> $$ = nterm input (1.1-14.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (14.1: )
@@ -57199,9 +61084,8 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
-./calc.at:1355: cat stderr
+./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -57210,96 +61094,133 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
+Return for a new token:
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Stack now 0 4 5
+Return for a new token:
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Stack now 0 4 5 16
+Reducing stack by rule 16 (line 129):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
 Stack now 0 4
-Shifting token error (1.2-10: )
+Shifting token error (1.2-3: )
 Entering state 11
 Stack now 0 4 11
+Return for a new token:
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
+Reading a token
+Next token is token number (1.9: 1)
+Shifting token number (1.9: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Return for a new token:
+Reading a token
+Next token is token number (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.11: 2)
+Error: discarding token number (1.11: 2)
+Error: popping token error (1.9-11: )
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Return for a new token:
+Reading a token
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
 Entering state 19
 Stack now 0 8 19
+Return for a new token:
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
 Entering state 28
 Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -57308,6 +61229,7 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -57316,10 +61238,6 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | 1 2
-./calc.at:1358:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -57333,16 +61251,75 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
-Stack now 0
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1357: "$PERL" -pi -e 'use strict;
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1355: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -57352,34 +61329,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-486. calc.at:1355:  ok
-./calc.at:1357: cat stderr
+./calc.at:1355: cat stderr
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Return for a new token:
-Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
-Stack now 0
 input:
-
-  | (1 + 1) / (1 - 1)
-./calc.at:1357:  $PREPARSER ./calc  input
 ./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -57390,294 +61342,90 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 4 12 21 1
+Stack now 0 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 23 4 12 20 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 115):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1358: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 8 21 30 22 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
 Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
 Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
 Reading a token
 Next token is token '-' (1.14: )
 Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (1.12: 1)
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 131):
+   $1 = token '!' (1.13: )
    $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 115):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-input:
-./calc.at:1357: "$PERL" -pi -e 'use strict;
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1358: cat stderr
+  | 1 2
+./calc.at:1362:  $PREPARSER ./calc  input
+./calc.at:1360: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -57687,50 +61435,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1//2
-./calc.at:1358:  $PREPARSER ./calc  input
-./calc.at:1357: cat stderr
-stderr:
-490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Return for a new token:
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
-Return for a new token:
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1362:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
 stderr:
+./calc.at:1360: cat stderr
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -57744,25 +61451,14 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
-Return for a new token:
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
-Return for a new token:
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
+Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
-487. calc.at:1357:  ok
-./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1358: "$PERL" -pi -e 'use strict;
+./calc.at:1357: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -57772,55 +61468,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-
-./calc.at:1358: cat stderr
 input:
-  | error
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (- *) + (1 2) = 1
+  | (# + 1) = 1111
+./calc.at:1355:  $PREPARSER ./calc  input
 ./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1357: cat stderr
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-./calc.at:1358: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1358: cat stderr
-./calc.at:1363:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
-
 input:
-./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-  | 1 = 2 = 3
-./calc.at:1358:  $PREPARSER ./calc  input
-stderr:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1360:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -57834,107 +61492,94 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
-Return for a new token:
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
-Return for a new token:
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
-Return for a new token:
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
-Stack now 0 8
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
+Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Stack now 0 8 19
-Return for a new token:
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Stack now 0 8 19 28
-Return for a new token:
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-./calc.at:1362: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./calc.at:1358: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1358: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1358:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1363: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (1.1-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -57943,26 +61588,159 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (1.1-2.0: )
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Return for a new token:
+Reading a token
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
+Return for a new token:
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 128):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Return for a new token:
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Return for a new token:
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Return for a new token:
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Return for a new token:
+Reading a token
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 19
+Stack now 0 8 19
+Return for a new token:
+Reading a token
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 28
+Stack now 0 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -57973,58 +61751,15 @@
 Stack now 0 6
 Return for a new token:
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-./calc.at:1358: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1358: cat stderr
-./calc.at:1358:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
 Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
-Stack now 0
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
-Stack now 0
-./calc.at:1358: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1358: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1358:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -58033,7 +61768,6 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
-Return for a new token:
 Reading a token
 Next token is token ')' (1.2: )
 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
@@ -58051,19 +61785,16 @@
 -> $$ = nterm exp (1.1-2: 1111)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
 Next token is token '+' (1.4: )
 Shifting token '+' (1.4: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token '(' (1.6: )
 Shifting token '(' (1.6: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
 Next token is token number (1.7: 1)
 Shifting token number (1.7: 1)
@@ -58074,13 +61805,11 @@
 -> $$ = nterm exp (1.7: 1)
 Entering state 12
 Stack now 0 8 21 4 12
-Return for a new token:
 Reading a token
 Next token is token '+' (1.9: )
 Shifting token '+' (1.9: )
 Entering state 21
 Stack now 0 8 21 4 12 21
-Return for a new token:
 Reading a token
 Next token is token number (1.11: 1)
 Shifting token number (1.11: 1)
@@ -58091,7 +61820,6 @@
 -> $$ = nterm exp (1.11: 1)
 Entering state 30
 Stack now 0 8 21 4 12 21 30
-Return for a new token:
 Reading a token
 Next token is token '+' (1.13: )
 Reducing stack by rule 7 (line 112):
@@ -58105,7 +61833,6 @@
 Shifting token '+' (1.13: )
 Entering state 21
 Stack now 0 8 21 4 12 21
-Return for a new token:
 Reading a token
 Next token is token number (1.15: 1)
 Shifting token number (1.15: 1)
@@ -58116,7 +61843,6 @@
 -> $$ = nterm exp (1.15: 1)
 Entering state 30
 Stack now 0 8 21 4 12 21 30
-Return for a new token:
 Reading a token
 Next token is token '+' (1.17: )
 Reducing stack by rule 7 (line 112):
@@ -58130,7 +61856,6 @@
 Shifting token '+' (1.17: )
 Entering state 21
 Stack now 0 8 21 4 12 21
-Return for a new token:
 Reading a token
 Next token is token ')' (1.18: )
 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
@@ -58152,7 +61877,6 @@
 -> $$ = nterm exp (1.6-18: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '+' (1.20: )
 Reducing stack by rule 7 (line 112):
@@ -58166,13 +61890,11 @@
 Shifting token '+' (1.20: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token '(' (1.22: )
 Shifting token '(' (1.22: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
 Next token is token '*' (1.23: )
 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
@@ -58186,7 +61908,6 @@
 Shifting token error (1.23: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token '*' (1.25: )
 Error: discarding token '*' (1.25: )
@@ -58195,7 +61916,6 @@
 Shifting token error (1.23-25: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token '*' (1.27: )
 Error: discarding token '*' (1.27: )
@@ -58204,7 +61924,6 @@
 Shifting token error (1.23-27: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token ')' (1.28: )
 Shifting token ')' (1.28: )
@@ -58217,7 +61936,6 @@
 -> $$ = nterm exp (1.22-28: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '+' (1.30: )
 Reducing stack by rule 7 (line 112):
@@ -58231,13 +61949,11 @@
 Shifting token '+' (1.30: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token '(' (1.32: )
 Shifting token '(' (1.32: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
 Next token is token number (1.33: 1)
 Shifting token number (1.33: 1)
@@ -58248,13 +61964,11 @@
 -> $$ = nterm exp (1.33: 1)
 Entering state 12
 Stack now 0 8 21 4 12
-Return for a new token:
 Reading a token
 Next token is token '*' (1.35: )
 Shifting token '*' (1.35: )
 Entering state 22
 Stack now 0 8 21 4 12 22
-Return for a new token:
 Reading a token
 Next token is token number (1.37: 2)
 Shifting token number (1.37: 2)
@@ -58265,7 +61979,6 @@
 -> $$ = nterm exp (1.37: 2)
 Entering state 31
 Stack now 0 8 21 4 12 22 31
-Return for a new token:
 Reading a token
 Next token is token '*' (1.39: )
 Reducing stack by rule 9 (line 114):
@@ -58279,7 +61992,6 @@
 Shifting token '*' (1.39: )
 Entering state 22
 Stack now 0 8 21 4 12 22
-Return for a new token:
 Reading a token
 Next token is token '*' (1.41: )
 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
@@ -58297,7 +62009,6 @@
 Shifting token error (1.33-41: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token ')' (1.42: )
 Shifting token ')' (1.42: )
@@ -58310,7 +62021,6 @@
 -> $$ = nterm exp (1.32-42: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '=' (1.44: )
 Reducing stack by rule 7 (line 112):
@@ -58324,7 +62034,6 @@
 Shifting token '=' (1.44: )
 Entering state 19
 Stack now 0 8 19
-Return for a new token:
 Reading a token
 Next token is token number (1.46: 1)
 Shifting token number (1.46: 1)
@@ -58335,7 +62044,6 @@
 -> $$ = nterm exp (1.46: 1)
 Entering state 28
 Stack now 0 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (1.47-2.0: )
 Reducing stack by rule 6 (line 102):
@@ -58361,7 +62069,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -58370,8 +62077,11 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !* ++
+./calc.at:1357:  $PREPARSER ./calc  input
+stderr:
 stderr:
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -58382,6 +62092,344 @@
 Stack now 0 4
 Return for a new token:
 Reading a token
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
+Return for a new token:
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 128):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Return for a new token:
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Return for a new token:
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Return for a new token:
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Return for a new token:
+Reading a token
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 19
+Stack now 0 8 19
+Return for a new token:
+Reading a token
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 28
+Stack now 0 8 19 28
+Return for a new token:
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 132):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
 Next token is token ')' (1.2: )
 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 Shifting token error (1.2: )
@@ -58398,19 +62446,16 @@
 -> $$ = nterm exp (1.1-2: 1111)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
 Next token is token '+' (1.4: )
 Shifting token '+' (1.4: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token '(' (1.6: )
 Shifting token '(' (1.6: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
 Next token is token number (1.7: 1)
 Shifting token number (1.7: 1)
@@ -58421,13 +62466,11 @@
 -> $$ = nterm exp (1.7: 1)
 Entering state 12
 Stack now 0 8 21 4 12
-Return for a new token:
 Reading a token
 Next token is token '+' (1.9: )
 Shifting token '+' (1.9: )
 Entering state 21
 Stack now 0 8 21 4 12 21
-Return for a new token:
 Reading a token
 Next token is token number (1.11: 1)
 Shifting token number (1.11: 1)
@@ -58438,7 +62481,6 @@
 -> $$ = nterm exp (1.11: 1)
 Entering state 30
 Stack now 0 8 21 4 12 21 30
-Return for a new token:
 Reading a token
 Next token is token '+' (1.13: )
 Reducing stack by rule 7 (line 112):
@@ -58452,7 +62494,6 @@
 Shifting token '+' (1.13: )
 Entering state 21
 Stack now 0 8 21 4 12 21
-Return for a new token:
 Reading a token
 Next token is token number (1.15: 1)
 Shifting token number (1.15: 1)
@@ -58463,7 +62504,6 @@
 -> $$ = nterm exp (1.15: 1)
 Entering state 30
 Stack now 0 8 21 4 12 21 30
-Return for a new token:
 Reading a token
 Next token is token '+' (1.17: )
 Reducing stack by rule 7 (line 112):
@@ -58477,7 +62517,6 @@
 Shifting token '+' (1.17: )
 Entering state 21
 Stack now 0 8 21 4 12 21
-Return for a new token:
 Reading a token
 Next token is token ')' (1.18: )
 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
@@ -58499,7 +62538,6 @@
 -> $$ = nterm exp (1.6-18: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '+' (1.20: )
 Reducing stack by rule 7 (line 112):
@@ -58513,13 +62551,11 @@
 Shifting token '+' (1.20: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token '(' (1.22: )
 Shifting token '(' (1.22: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
 Next token is token '*' (1.23: )
 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
@@ -58533,7 +62569,6 @@
 Shifting token error (1.23: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token '*' (1.25: )
 Error: discarding token '*' (1.25: )
@@ -58542,7 +62577,6 @@
 Shifting token error (1.23-25: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token '*' (1.27: )
 Error: discarding token '*' (1.27: )
@@ -58551,7 +62585,6 @@
 Shifting token error (1.23-27: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token ')' (1.28: )
 Shifting token ')' (1.28: )
@@ -58564,7 +62597,6 @@
 -> $$ = nterm exp (1.22-28: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '+' (1.30: )
 Reducing stack by rule 7 (line 112):
@@ -58578,13 +62610,11 @@
 Shifting token '+' (1.30: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token '(' (1.32: )
 Shifting token '(' (1.32: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
 Next token is token number (1.33: 1)
 Shifting token number (1.33: 1)
@@ -58595,13 +62625,11 @@
 -> $$ = nterm exp (1.33: 1)
 Entering state 12
 Stack now 0 8 21 4 12
-Return for a new token:
 Reading a token
 Next token is token '*' (1.35: )
 Shifting token '*' (1.35: )
 Entering state 22
 Stack now 0 8 21 4 12 22
-Return for a new token:
 Reading a token
 Next token is token number (1.37: 2)
 Shifting token number (1.37: 2)
@@ -58612,7 +62640,6 @@
 -> $$ = nterm exp (1.37: 2)
 Entering state 31
 Stack now 0 8 21 4 12 22 31
-Return for a new token:
 Reading a token
 Next token is token '*' (1.39: )
 Reducing stack by rule 9 (line 114):
@@ -58626,7 +62653,6 @@
 Shifting token '*' (1.39: )
 Entering state 22
 Stack now 0 8 21 4 12 22
-Return for a new token:
 Reading a token
 Next token is token '*' (1.41: )
 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
@@ -58644,7 +62670,6 @@
 Shifting token error (1.33-41: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token ')' (1.42: )
 Shifting token ')' (1.42: )
@@ -58657,7 +62682,6 @@
 -> $$ = nterm exp (1.32-42: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '=' (1.44: )
 Reducing stack by rule 7 (line 112):
@@ -58671,7 +62695,6 @@
 Shifting token '=' (1.44: )
 Entering state 19
 Stack now 0 8 19
-Return for a new token:
 Reading a token
 Next token is token number (1.46: 1)
 Shifting token number (1.46: 1)
@@ -58682,7 +62705,6 @@
 -> $$ = nterm exp (1.46: 1)
 Entering state 28
 Stack now 0 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (1.47-2.0: )
 Reducing stack by rule 6 (line 102):
@@ -58708,7 +62730,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -58717,6 +62738,119 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 132):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1360: cat stderr
 ./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -58727,11 +62861,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1358: cat stderr
 input:
+./calc.at:1362: cat stderr
+./calc.at:1358: cat stderr
   | (!!) + (1 2) = 1
-./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1355: cat stderr
 stderr:
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1357: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -58740,13 +62887,11 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
-Return for a new token:
 Reading a token
 Next token is token '!' (1.2: )
 Shifting token '!' (1.2: )
 Entering state 5
 Stack now 0 4 5
-Return for a new token:
 Reading a token
 Next token is token '!' (1.3: )
 Shifting token '!' (1.3: )
@@ -58759,7 +62904,6 @@
 Shifting token error (1.2-3: )
 Entering state 11
 Stack now 0 4 11
-Return for a new token:
 Reading a token
 Next token is token ')' (1.4: )
 Shifting token ')' (1.4: )
@@ -58772,19 +62916,16 @@
 -> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
 Next token is token '+' (1.6: )
 Shifting token '+' (1.6: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token '(' (1.8: )
 Shifting token '(' (1.8: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
 Next token is token number (1.9: 1)
 Shifting token number (1.9: 1)
@@ -58795,7 +62936,6 @@
 -> $$ = nterm exp (1.9: 1)
 Entering state 12
 Stack now 0 8 21 4 12
-Return for a new token:
 Reading a token
 Next token is token number (1.11: 2)
 1.11: syntax error, unexpected number
@@ -58811,7 +62951,6 @@
 Shifting token error (1.9-11: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token ')' (1.12: )
 Shifting token ')' (1.12: )
@@ -58824,7 +62963,6 @@
 -> $$ = nterm exp (1.8-12: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '=' (1.14: )
 Reducing stack by rule 7 (line 112):
@@ -58838,7 +62976,6 @@
 Shifting token '=' (1.14: )
 Entering state 19
 Stack now 0 8 19
-Return for a new token:
 Reading a token
 Next token is token number (1.16: 1)
 Shifting token number (1.16: 1)
@@ -58849,7 +62986,6 @@
 -> $$ = nterm exp (1.16: 1)
 Entering state 28
 Stack now 0 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 102):
@@ -58875,7 +63011,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -58884,23 +63019,62 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
+  | 1//2
+./calc.at:1362:  $PREPARSER ./calc  input
+input:
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1355:  $PREPARSER ./calc  input
+  | (* *) + (*) + (*)
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1358:  $PREPARSER ./calc  input
+stderr:
+stderr:
+  | (#) + (#) = 2222
+./calc.at:1357:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
-Return for a new token:
 Reading a token
 Next token is token '!' (1.2: )
 Shifting token '!' (1.2: )
 Entering state 5
 Stack now 0 4 5
-Return for a new token:
 Reading a token
 Next token is token '!' (1.3: )
 Shifting token '!' (1.3: )
@@ -58913,7 +63087,6 @@
 Shifting token error (1.2-3: )
 Entering state 11
 Stack now 0 4 11
-Return for a new token:
 Reading a token
 Next token is token ')' (1.4: )
 Shifting token ')' (1.4: )
@@ -58926,19 +63099,16 @@
 -> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
 Next token is token '+' (1.6: )
 Shifting token '+' (1.6: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
 Next token is token '(' (1.8: )
 Shifting token '(' (1.8: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
 Next token is token number (1.9: 1)
 Shifting token number (1.9: 1)
@@ -58949,7 +63119,6 @@
 -> $$ = nterm exp (1.9: 1)
 Entering state 12
 Stack now 0 8 21 4 12
-Return for a new token:
 Reading a token
 Next token is token number (1.11: 2)
 1.11: syntax error, unexpected number
@@ -58965,7 +63134,6 @@
 Shifting token error (1.9-11: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
 Next token is token ')' (1.12: )
 Shifting token ')' (1.12: )
@@ -58978,7 +63146,6 @@
 -> $$ = nterm exp (1.8-12: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
 Next token is token '=' (1.14: )
 Reducing stack by rule 7 (line 112):
@@ -58992,7 +63159,6 @@
 Shifting token '=' (1.14: )
 Entering state 19
 Stack now 0 8 19
-Return for a new token:
 Reading a token
 Next token is token number (1.16: 1)
 Shifting token number (1.16: 1)
@@ -59003,7 +63169,6 @@
 -> $$ = nterm exp (1.16: 1)
 Entering state 28
 Stack now 0 8 19 28
-Return for a new token:
 Reading a token
 Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 102):
@@ -59029,7 +63194,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -59038,20 +63202,7 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1358: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1358:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -59063,27 +63214,23 @@
 Stack now 0 4
 Return for a new token:
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
-Return for a new token:
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 128):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
+Return for a new token:
+Reading a token
 Next token is token '*' (1.4: )
 Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
+Error: popping token error (1.2: )
 Stack now 0 4
 Shifting token error (1.2-4: )
 Entering state 11
@@ -59115,78 +63262,86 @@
 Stack now 0 8 21 4
 Return for a new token:
 Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Return for a new token:
-Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
 Entering state 11
 Stack now 0 8 21 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
 Stack now 0 8 21 4
-Shifting token error (1.10-12: )
+Shifting token error (1.10: )
 Entering state 11
 Stack now 0 8 21 4 11
 Return for a new token:
 Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
 Entering state 26
 Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
 Entering state 30
 Stack now 0 8 21 30
 Return for a new token:
 Reading a token
-Next token is token '=' (1.15: )
+Next token is token '+' (1.13: )
 Reducing stack by rule 7 (line 112):
    $1 = nterm exp (1.1-5: 1111)
    $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21
 Return for a new token:
 Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
-Entering state 28
-Stack now 0 8 19 28
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Stack now 0 8 21 4
+Return for a new token:
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 21 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Return for a new token:
+Reading a token
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Stack now 0 8 21 30
 Return for a new token:
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
 Stack now 0 8
 Next token is token '\n' (1.18-2.0: )
@@ -59194,7 +63349,7 @@
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-17: 3333)
    $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
@@ -59214,7 +63369,6 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -59223,141 +63377,103 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
-Return for a new token:
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
-Return for a new token:
 Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 128):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Return for a new token:
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
-Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Return for a new token:
 Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
+1.8: syntax error: invalid character: '#'
+Shifting token error (1.8: )
 Entering state 11
 Stack now 0 8 21 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.8: )
 Stack now 0 8 21 4
-Shifting token error (1.10-12: )
+Shifting token error (1.8: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
 Entering state 26
 Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
-Next token is token '=' (1.15: )
+Next token is token '=' (1.11: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
-Return for a new token:
 Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
 Entering state 28
 Stack now 0 8 19 28
-Return for a new token:
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -59366,7 +63482,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -59375,20 +63490,122 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1358: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -59554,8 +63771,19 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -59564,145 +63792,217 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
-Return for a new token:
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.2: syntax error: invalid character: '#'
 Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
 Error: popping token error (1.2: )
 Stack now 0 4
 Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Return for a new token:
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
 Entering state 4
 Stack now 0 8 21 4
-Return for a new token:
 Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
+1.8: syntax error: invalid character: '#'
+Shifting token error (1.8: )
 Entering state 11
 Stack now 0 8 21 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.8: )
 Stack now 0 8 21 4
-Shifting token error (1.10: )
+Shifting token error (1.8: )
 Entering state 11
 Stack now 0 8 21 4 11
-Return for a new token:
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
 Entering state 26
 Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
-Next token is token '+' (1.13: )
+Next token is token '=' (1.11: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21
-Return for a new token:
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
-Stack now 0 8 21 4
-Return for a new token:
+Stack now 0 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
 Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 21 4
-Shifting token error (1.16: )
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
 Entering state 11
-Stack now 0 8 21 4 11
-Return for a new token:
+Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
 Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Return for a new token:
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 139):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -59711,7 +64011,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -59720,6 +64019,7 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
 ./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -59730,11 +64030,75 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1360: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
 ./calc.at:1358: cat stderr
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
 input:
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1357: cat stderr
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (- *) + (1 2) = 1
 stderr:
+./calc.at:1360:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1355: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -59822,100 +64186,160 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1362: cat stderr
 ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 128):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 21
 Stack now 0 8 21
-Return for a new token:
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
 Entering state 1
-Stack now 0 8 21 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Return for a new token:
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Return for a new token:
+Next token is token '=' (1.15: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
 Entering state 1
-Stack now 0 8 21 30 22 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Return for a new token:
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Return for a new token:
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Return for a new token:
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1358:  $PREPARSER ./calc  input
-stderr:
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+  | (1 + #) = 1111
+./calc.at:1357:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -59993,107 +64417,386 @@
 Stack now 0 8 21 5
 Return for a new token:
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 131):
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 130):
    $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
+   $2 = token '+' (1.14: )
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Return for a new token:
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
-Stack now 0 8 21
-Return for a new token:
+Stack now 0 4 12 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Return for a new token:
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Return for a new token:
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 8 21 30 22 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Return for a new token:
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+stderr:
+input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 128):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Next token is token '+' (1.11: )
+Reading a token
+Next token is token '=' (1.15: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1362:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
-Stack now 0 8 21
-Return for a new token:
+Stack now 0 4 12 21
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Return for a new token:
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 131):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
 stderr:
-./calc.at:1358: "$PERL" -pi -e 'use strict;
+  | 1 + 2 * 3 + !- ++
+./calc.at:1358:  $PREPARSER ./calc  input
+  | (1 + 1) / (1 - 1)
+./calc.at:1355:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+stderr:
+./calc.at:1360: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -60103,25 +64806,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stdout:
-./calc.at:1358: cat stderr
-./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1360: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1358:  $PREPARSER ./calc  input
 stderr:
-input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -60199,37 +64884,330 @@
 Stack now 0 8 21 5
 Return for a new token:
 Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 132):
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 131):
    $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
+   $2 = token '-' (1.14: )
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 138):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 119):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 138):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 121):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1360: cat stderr
+input:
+  | (* *) + (*) + (*)
 ./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+stderr:
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 118):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 138):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 119):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 138):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 121):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
 Next token is token number (1.1: 1)
 Shifting token number (1.1: 1)
 Entering state 1
@@ -60303,108 +65281,153 @@
 Stack now 0 8 21 5
 Return for a new token:
 Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 132):
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 131):
    $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
+   $2 = token '-' (1.14: )
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 stderr:
+./calc.at:1357: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
+Stack now 0 8 21 4
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
 Entering state 30
 Stack now 0 8 21 30
-Next token is token '=' (1.11: )
+Reading a token
+Next token is token '+' (1.13: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (1.13: 7)
-Shifting token number (1.13: 7)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 28
-Stack now 0 8 19 28
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 21 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -60414,926 +65437,587 @@
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token number (2.1: 1)
-Shifting token number (2.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 21
-Stack now 0 6 8 21
-Reading a token
-Next token is token number (2.5: 2)
-Shifting token number (2.5: 2)
-Entering state 1
-Stack now 0 6 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 30
-Stack now 0 6 8 21 30
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 22
-Stack now 0 6 8 21 30 22
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Stack now 0 6 8 21 30 22 2
-Reading a token
-Next token is token number (2.10: 3)
-Shifting token number (2.10: 3)
-Entering state 1
-Stack now 0 6 8 21 30 22 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Stack now 0 6 8 21 30 22 2 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 31
-Stack now 0 6 8 21 30 22 31
-Next token is token '=' (2.12: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
-Entering state 30
-Stack now 0 6 8 21 30
-Next token is token '=' (2.12: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (2.15: 5)
-Shifting token number (2.15: 5)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Stack now 0 6 2
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1358: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1358: cat stderr
+input:
+./calc.at:1355: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+  | (# + 1) = 1111
+./calc.at:1357:  $PREPARSER ./calc  input
+./calc.at:1362: cat stderr
+./calc.at:1355: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token number (4.2: 1)
-Shifting token number (4.2: 1)
-Entering state 1
-Stack now 0 6 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Stack now 0 6 2 10
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
 Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 24
-Stack now 0 6 2 10 24
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token number (4.4: 2)
-Shifting token number (4.4: 2)
-Entering state 1
-Stack now 0 6 2 10 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 33
-Stack now 0 6 2 10 24 33
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '=' (4.6: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (4.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
-Stack now 0 6 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (4.9: 1)
-Shifting token number (4.9: 1)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
-Stack now 0 6 8 19 2 10
+Stack now 0 8
 Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
-Stack now 0 6
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
-Stack now 0 6 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Stack now 0 6 4 2
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
+Stack now 0 8 21 4
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
 Reading a token
-Next token is token number (5.3: 1)
-Shifting token number (5.3: 1)
-Entering state 1
-Stack now 0 6 4 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Stack now 0 6 4 2 10
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token ')' (5.4: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 27
-Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 24
-Stack now 0 6 8 24
+Stack now 0 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (5.6: 2)
-Shifting token number (5.6: 2)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 33
-Stack now 0 6 8 24 33
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token '=' (5.8: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 19
-Stack now 0 6 8 19
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 21 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
 Reading a token
-Next token is token number (5.10: 1)
-Shifting token number (5.10: 1)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 28
-Stack now 0 6 8 19 28
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
-Stack now 0 6 8 25
+Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Stack now 0 6 2 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Stack now 0 6 2 2 2
-Reading a token
-Next token is token number (7.4: 1)
-Shifting token number (7.4: 1)
-Entering state 1
-Stack now 0 6 2 2 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Stack now 0 6 2 2 2 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Stack now 0 6 2 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (7.9: 1)
-Shifting token number (7.9: 1)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
-Stack now 0 6
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+486. calc.at:1355:  ok
+stderr:
+input:
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
-Stack now 0 6
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
 Reading a token
-Next token is token number (9.1: 1)
-Shifting token number (9.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.1: 1)
--> $$ = nterm exp (9.1: 1)
-Entering state 8
-Stack now 0 6 8
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 20
-Stack now 0 6 8 20
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token number (9.5: 2)
-Shifting token number (9.5: 2)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 29
-Stack now 0 6 8 20 29
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '-' (9.7: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
-Stack now 0 6 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token number (9.9: 3)
-Shifting token number (9.9: 3)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 29
-Stack now 0 6 8 20 29
+Stack now 0 8
 Reading a token
-Next token is token '=' (9.11: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
-Stack now 0 6 8 19 2
+Stack now 0 8 19
 Reading a token
-Next token is token number (9.14: 4)
-Shifting token number (9.14: 4)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 6 8 19 2 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (9.15-10.0: )
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
-Stack now 0 6 8 25
+Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token number (10.1: 1)
-Shifting token number (10.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.1: 1)
--> $$ = nterm exp (10.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 20
-Stack now 0 6 8 20
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 = 2 = 3
+./calc.at:1362:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !* ++
+stderr:
+./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
-Stack now 0 6 8 20 4
-Reading a token
-Next token is token number (10.6: 2)
-Shifting token number (10.6: 2)
-Entering state 1
-Stack now 0 6 8 20 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
-Stack now 0 6 8 20 4 12
+Stack now 0 4
 Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 20
-Stack now 0 6 8 20 4 12 20
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token number (10.10: 3)
-Shifting token number (10.10: 3)
-Entering state 1
-Stack now 0 6 8 20 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 29
-Stack now 0 6 8 20 4 12 20 29
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token ')' (10.11: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
-Entering state 12
-Stack now 0 6 8 20 4 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 27
-Stack now 0 6 8 20 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 29
-Stack now 0 6 8 20 29
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '=' (10.13: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
-Stack now 0 6 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
-Stack now 0 6 8 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (10.15: 2)
-Shifting token number (10.15: 2)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 6 8 19 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (10.15: 2)
--> $$ = nterm exp (10.15: 2)
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
-Stack now 0 6 8 19 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (10.16-11.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
-Stack now 0 6 8 25
+Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
-Entering state 6
-Stack now 0 6
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token number (12.1: 2)
-Shifting token number (12.1: 2)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 6 1
+Stack now 0 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (12.1: 2)
--> $$ = nterm exp (12.1: 2)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 24
-Stack now 0 6 8 24
-Reading a token
-Next token is token number (12.3: 2)
-Shifting token number (12.3: 2)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 33
-Stack now 0 6 8 24 33
+Stack now 0 8
 Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 24
-Stack now 0 6 8 24 33 24
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (12.5: 3)
-Shifting token number (12.5: 3)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 6 8 24 33 24 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 33
-Stack now 0 6 8 24 33 24 33
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 33
-Stack now 0 6 8 24 33
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
-Entering state 19
-Stack now 0 6 8 19
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token number (12.9-11: 256)
-Shifting token number (12.9-11: 256)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 6 8 19 1
+Stack now 0 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (12.12-13.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
-Stack now 0 6
+Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
-Entering state 4
-Stack now 0 6 4
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token number (13.2: 2)
-Shifting token number (13.2: 2)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 6 4 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (13.2: 2)
--> $$ = nterm exp (13.2: 2)
-Entering state 12
-Stack now 0 6 4 12
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Return for a new token:
 Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 24
-Stack now 0 6 4 12 24
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Return for a new token:
 Reading a token
-Next token is token number (13.4: 2)
-Shifting token number (13.4: 2)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
 Entering state 1
-Stack now 0 6 4 12 24 1
+Stack now 0 8 21 30 22 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 33
-Stack now 0 6 4 12 24 33
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Return for a new token:
 Reading a token
-Next token is token ')' (13.5: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 27
-Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Stack now 0 6 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 24
-Stack now 0 6 8 24
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Return for a new token:
 Reading a token
-Next token is token number (13.7: 3)
-Shifting token number (13.7: 3)
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 132):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+./calc.at:1360: cat stderr
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 6 8 24 1
+Stack now 0 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 33
-Stack now 0 6 8 24 33
-Reading a token
-Next token is token '=' (13.9: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Stack now 0 6 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
 Entering state 19
-Stack now 0 6 8 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (13.11-12: 64)
-Shifting token number (13.11-12: 64)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 6 8 19 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (13.13-14.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
-Entering state 6
-Stack now 0 6
+Stack now 0 8 19 28
 Reading a token
-Now at end of input.
-Shifting token end of file (14.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1358: "$PERL" -pi -e 'use strict;
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+./calc.at:1357: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -61343,8 +66027,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./calc.at:1358: cat stderr
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -61358,11 +66041,13 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
 Next token is token '+' (1.3: )
 Shifting token '+' (1.3: )
 Entering state 21
 Stack now 0 8 21
+Return for a new token:
 Reading a token
 Next token is token number (1.5: 2)
 Shifting token number (1.5: 2)
@@ -61373,11 +66058,13 @@
 -> $$ = nterm exp (1.5: 2)
 Entering state 30
 Stack now 0 8 21 30
+Return for a new token:
 Reading a token
 Next token is token '*' (1.7: )
 Shifting token '*' (1.7: )
 Entering state 22
 Stack now 0 8 21 30 22
+Return for a new token:
 Reading a token
 Next token is token number (1.9: 3)
 Shifting token number (1.9: 3)
@@ -61388,8 +66075,9 @@
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Stack now 0 8 21 30 22 31
+Return for a new token:
 Reading a token
-Next token is token '=' (1.11: )
+Next token is token '+' (1.11: )
 Reducing stack by rule 9 (line 114):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
@@ -61397,7 +66085,7 @@
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Stack now 0 8 21 30
-Next token is token '=' (1.11: )
+Next token is token '+' (1.11: )
 Reducing stack by rule 7 (line 112):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
@@ -61405,968 +66093,239 @@
 -> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Return for a new token:
 Reading a token
-Next token is token number (1.13: 7)
-Shifting token number (1.13: 7)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 28
-Stack now 0 8 19 28
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 132):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1360:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token number (2.1: 1)
-Shifting token number (2.1: 1)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 6 1
+Stack now 0 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (2.1: 1)
--> $$ = nterm exp (2.1: 1)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Stack now 0 6 8
+Stack now 0 8
 Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
-Stack now 0 6 8 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (2.5: 2)
-Shifting token number (2.5: 2)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 6 8 21 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (2.5: 2)
--> $$ = nterm exp (2.5: 2)
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 30
-Stack now 0 6 8 21 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
 Entering state 22
-Stack now 0 6 8 21 30 22
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Stack now 0 6 8 21 30 22 2
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token number (2.10: 3)
-Shifting token number (2.10: 3)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
 Entering state 1
-Stack now 0 6 8 21 30 22 2 1
+Stack now 0 8 21 30 22 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Stack now 0 6 8 21 30 22 2 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
 Entering state 31
-Stack now 0 6 8 21 30 22 31
-Next token is token '=' (2.12: )
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
 Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
 Entering state 30
-Stack now 0 6 8 21 30
-Next token is token '=' (2.12: )
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (2.15: 5)
-Shifting token number (2.15: 5)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token number (4.2: 1)
-Shifting token number (4.2: 1)
-Entering state 1
-Stack now 0 6 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Stack now 0 6 2 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 24
-Stack now 0 6 2 10 24
-Reading a token
-Next token is token number (4.4: 2)
-Shifting token number (4.4: 2)
-Entering state 1
-Stack now 0 6 2 10 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 33
-Stack now 0 6 2 10 24 33
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (4.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (4.9: 1)
-Shifting token number (4.9: 1)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
-Entering state 4
-Stack now 0 6 4
-Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Stack now 0 6 4 2
-Reading a token
-Next token is token number (5.3: 1)
-Shifting token number (5.3: 1)
-Entering state 1
-Stack now 0 6 4 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Stack now 0 6 4 2 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 27
-Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 24
-Stack now 0 6 8 24
-Reading a token
-Next token is token number (5.6: 2)
-Shifting token number (5.6: 2)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 33
-Stack now 0 6 8 24 33
-Reading a token
-Next token is token '=' (5.8: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (5.10: 1)
-Shifting token number (5.10: 1)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Stack now 0 6 2 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Stack now 0 6 2 2 2
-Reading a token
-Next token is token number (7.4: 1)
-Shifting token number (7.4: 1)
-Entering state 1
-Stack now 0 6 2 2 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Stack now 0 6 2 2 2 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Stack now 0 6 2 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (7.9: 1)
-Shifting token number (7.9: 1)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (9.1: 1)
-Shifting token number (9.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.1: 1)
--> $$ = nterm exp (9.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token number (9.5: 2)
-Shifting token number (9.5: 2)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '-' (9.7: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token number (9.9: 3)
-Shifting token number (9.9: 3)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '=' (9.11: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (9.14: 4)
-Shifting token number (9.14: 4)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (10.1: 1)
-Shifting token number (10.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.1: 1)
--> $$ = nterm exp (10.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
-Entering state 4
-Stack now 0 6 8 20 4
-Reading a token
-Next token is token number (10.6: 2)
-Shifting token number (10.6: 2)
-Entering state 1
-Stack now 0 6 8 20 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
-Stack now 0 6 8 20 4 12
-Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 20
-Stack now 0 6 8 20 4 12 20
-Reading a token
-Next token is token number (10.10: 3)
-Shifting token number (10.10: 3)
-Entering state 1
-Stack now 0 6 8 20 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 29
-Stack now 0 6 8 20 4 12 20 29
-Reading a token
-Next token is token ')' (10.11: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
-Entering state 12
-Stack now 0 6 8 20 4 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 27
-Stack now 0 6 8 20 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '=' (10.13: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (10.15: 2)
-Shifting token number (10.15: 2)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.15: 2)
--> $$ = nterm exp (10.15: 2)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (10.16-11.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (12.1: 2)
-Shifting token number (12.1: 2)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.1: 2)
--> $$ = nterm exp (12.1: 2)
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 24
-Stack now 0 6 8 24
-Reading a token
-Next token is token number (12.3: 2)
-Shifting token number (12.3: 2)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 33
-Stack now 0 6 8 24 33
-Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 24
-Stack now 0 6 8 24 33 24
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (12.5: 3)
-Shifting token number (12.5: 3)
-Entering state 1
-Stack now 0 6 8 24 33 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 33
-Stack now 0 6 8 24 33 24 33
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
 Reading a token
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 33
-Stack now 0 6 8 24 33
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
-Entering state 19
-Stack now 0 6 8 19
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 130):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1357: cat stderr
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1358: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
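The Perl one-liner repeated above normalizes the expected-output file: it rewrites diagnostics of the form "syntax error on token [X] (expected: [A] [B])" into Bison's classic wording, and it lists the expected tokens only when there are between two and four of them. A minimal Python sketch of the same substitution, purely for illustration (the function name and the sample input below are invented, not taken from the log):

    import re

    def normalize(line: str) -> str:
        # Rewrite "syntax error on token [X] (expected: [A] [B] ...)" into the
        # classic Bison wording; keep the expected tokens only when there are
        # between two and four of them, mirroring the Perl condition above.
        def repl(m: re.Match) -> str:
            unexp = m.group(1)
            exps = re.findall(r"\[(.*?)\]", m.group(2))
            if 1 < len(exps) < 5:
                return "syntax error, unexpected %s, expecting %s" % (
                    unexp, " or ".join(exps))
            return "syntax error, unexpected %s" % unexp
        return re.sub(r"syntax error on token \[(.*?)\] \(expected: (.*)\)",
                      repl, line)

    # Hypothetical example:
    # normalize("1.3: syntax error on token ['/'] (expected: [number] ['-'])")
    # -> "1.3: syntax error, unexpected '/', expecting number or '-'"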
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1362: cat stderr
+./calc.at:1358: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token number (12.9-11: 256)
-Shifting token number (12.9-11: 256)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 6 8 19 1
+Stack now 0 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (12.12-13.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
-Entering state 4
-Stack now 0 6 4
-Reading a token
-Next token is token number (13.2: 2)
-Shifting token number (13.2: 2)
-Entering state 1
-Stack now 0 6 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.2: 2)
--> $$ = nterm exp (13.2: 2)
-Entering state 12
-Stack now 0 6 4 12
+Stack now 0 8
 Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 24
-Stack now 0 6 4 12 24
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (13.4: 2)
-Shifting token number (13.4: 2)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 6 4 12 24 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 33
-Stack now 0 6 4 12 24 33
-Reading a token
-Next token is token ')' (13.5: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 27
-Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
-Entering state 8
-Stack now 0 6 8
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 24
-Stack now 0 6 8 24
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token number (13.7: 3)
-Shifting token number (13.7: 3)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
 Entering state 1
-Stack now 0 6 8 24 1
+Stack now 0 8 21 30 22 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 33
-Stack now 0 6 8 24 33
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
 Reading a token
-Next token is token '=' (13.9: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Stack now 0 6 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (13.11-12: 64)
-Shifting token number (13.11-12: 64)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
-Entering state 28
-Stack now 0 6 8 19 28
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token '\n' (13.13-14.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
-Entering state 6
-Stack now 0 6
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
 Reading a token
-Now at end of input.
-Shifting token end of file (14.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 130):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr
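The `$EGREP -c -v 'Return for a new token:|LAC:'` step above counts the trace lines that remain after dropping the "Return for a new token:" and "LAC:" lines, presumably because those lines vary between parser variants. A rough Python equivalent, purely for illustration (the helper name is invented):

    import re

    def count_stable_trace_lines(trace: str) -> int:
        # Same filter as `egrep -c -v 'Return for a new token:|LAC:'`:
        # count only the lines that match neither pattern.
        skip = re.compile(r"Return for a new token:|LAC:")
        return sum(1 for line in trace.splitlines() if not skip.search(line))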
 input:
+input:
   | (#) + (#) = 2222
+input:
 ./calc.at:1358:  $PREPARSER ./calc  input
+input:
+492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1364:
+    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
+      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
+    else
+      mv calc.y.tmp calc.y
+    fi
+
+
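The shell fragment above prepares the grammar for test 492: when POSIXLY_CORRECT is exported, the lines tagged `/* !POSIX */` are stripped from calc.y.tmp as it is written to calc.y; otherwise the temporary file is simply renamed. A rough Python rendering of that step, assuming the same file names and simplifying the exported-flag check to an environment lookup (illustrative only):

    import os, shutil

    def prepare_grammar(tmp: str = "calc.y.tmp", out: str = "calc.y") -> None:
        # Stand-in for the logged shell step, not the test suite's own code.
        if "POSIXLY_CORRECT" in os.environ:
            # sed -e '/\/\* !POSIX \*\//d' calc.y.tmp > calc.y
            with open(tmp) as src, open(out, "w") as dst:
                dst.writelines(l for l in src if "/* !POSIX */" not in l)
        else:
            # mv calc.y.tmp calc.y
            shutil.move(tmp, out)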
 stderr:
+  | 
+  | +1
+./calc.at:1362:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !- ++
+./calc.at:1360:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -62498,31 +66457,127 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 2
-./calc.at:1360:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
+./calc.at:1357:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
-Stack now 0
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -62655,8 +66710,6 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -62671,12 +66724,225 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
+Starting parse
+Entering state 0
 Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -62687,7 +66953,125 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 96):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+./calc.at:1357: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1358: cat stderr
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1360: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -62699,10 +67083,14 @@
   }eg
 ' expout || exit 77
 input:
-  | (1 + #) = 1111
-./calc.at:1358:  $PREPARSER ./calc  input
-./calc.at:1360: cat stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1362: cat stderr
+./calc.at:1357:  $PREPARSER ./calc  input
+input:
+./calc.at:1362:  $PREPARSER ./calc  /dev/null
 stderr:
+./calc.at:1360: cat stderr
+  | (1 + #) = 1111
 Starting parse
 Entering state 0
 Stack now 0
@@ -62711,7 +67099,6 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
-Return for a new token:
 Reading a token
 Next token is token number (1.2: 1)
 Shifting token number (1.2: 1)
@@ -62722,76 +67109,113 @@
 -> $$ = nterm exp (1.2: 1)
 Entering state 12
 Stack now 0 4 12
-Return for a new token:
 Reading a token
 Next token is token '+' (1.4: )
 Shifting token '+' (1.4: )
 Entering state 21
 Stack now 0 4 12 21
-Return for a new token:
-1.6: syntax error: invalid character: '#'
 Reading a token
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
 Reading a token
 Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 126):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
+   $2 = nterm exp (1.2-6: 2)
    $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Return for a new token:
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 8 23 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Return for a new token:
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 115):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -62800,7 +67224,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -62809,11 +67232,10 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
 stderr:
-  | 1//2
-./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -62920,89 +67342,16 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
 Stack now 0
-./calc.at:1358: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1358: cat stderr
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1360: cat stderr
-  | (# + 1) = 1111
-./calc.at:1358:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -63012,32 +67361,35 @@
 Entering state 4
 Stack now 0 4
 Return for a new token:
-1.2: syntax error: invalid character: '#'
 Reading a token
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Return for a new token:
 Reading a token
 Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Return for a new token:
+1.6: syntax error: invalid character: '#'
+Reading a token
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
-Return for a new token:
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
 Stack now 0 4
 Shifting token error (1.2-6: )
 Entering state 11
@@ -63106,11 +67458,8 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | error
-./calc.at:1360:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -63119,84 +67468,123 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Stack now 0 4
-Return for a new token:
-1.2: syntax error: invalid character: '#'
 Reading a token
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
 Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
 Reading a token
 Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
 Reading a token
 Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 126):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
+   $2 = nterm exp (1.2-6: 2)
    $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Stack now 0 8
-Return for a new token:
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Return for a new token:
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 8 23 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Return for a new token:
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 115):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -63205,7 +67593,6 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Return for a new token:
 Reading a token
 Now at end of input.
 Shifting token end of file (2.1: )
@@ -63219,20 +67606,14 @@
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
+Now at end of input.
+1.1: syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file (1.1: )
 Stack now 0
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1360:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
 ./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -63243,276 +67624,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1358: cat stderr
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1358:  $PREPARSER ./calc  input
-stderr:
-input:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Return for a new token:
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Return for a new token:
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Return for a new token:
-1.6: syntax error: invalid character: '#'
-Reading a token
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Return for a new token:
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Return for a new token:
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Return for a new token:
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Return for a new token:
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 = 2 = 3
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Return for a new token:
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Return for a new token:
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Return for a new token:
-1.6: syntax error: invalid character: '#'
-Reading a token
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Return for a new token:
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Return for a new token:
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Return for a new token:
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Return for a new token:
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Return for a new token:
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
 Starting parse
 Entering state 0
 Stack now 0
@@ -63527,33 +67638,87 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
 Next token is token number (1.5: 2)
 Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
    $1 = token number (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 132):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1358: cat stderr
 ./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1358: "$PERL" -pi -e 'use strict;
+./calc.at:1357: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1364: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+./calc.at:1362: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -63563,6 +67728,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1362: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -63578,32 +67744,84 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
 Next token is token number (1.5: 2)
 Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 8 19 1
+Stack now 0 8 21 1
 Reducing stack by rule 5 (line 101):
    $1 = token number (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-./calc.at:1358: cat stderr
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 132):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+input:
+./calc.at:1357: cat stderr
+input:
+stderr:
+  | (# + 1) = 1111
+./calc.at:1358:  $PREPARSER ./calc  input
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1362:  $PREPARSER ./calc  input
+stderr:
+stdout:
+487. calc.at:1357:  ok
 ./calc.at:1360: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -63614,12 +67832,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1360: cat stderr
-./calc.at:1358:  $PREPARSER ./calc  input
-stderr:
-input:
+./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 Starting parse
 Entering state 0
 Stack now 0
@@ -63629,133 +67842,83 @@
 Entering state 4
 Stack now 0 4
 Return for a new token:
+1.2: syntax error: invalid character: '#'
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
 Return for a new token:
 Reading a token
 Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
 Return for a new token:
 Reading a token
 Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
 Return for a new token:
 Reading a token
 Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 126):
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
+   $2 = token error (1.2-6: )
    $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Return for a new token:
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Return for a new token:
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Return for a new token:
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Return for a new token:
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
 Return for a new token:
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 8 23 4 12 20 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Return for a new token:
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Return for a new token:
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 115):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
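
The location ranges printed in these traces, such as 1.11-14 for the number 1111 and 1.1-14 for the whole comparison, are line.column spans: when a rule is reduced, the resulting symbol's range runs from the start of its first component to the end of its last (exp at 1.1-7, '=' at 1.9, exp at 1.11-14 yield 1.1-14 above). A small C sketch of that merge follows; the struct and function are illustrative stand-ins, not Bison's real YYLTYPE or YYLLOC_DEFAULT.

#include <stdio.h>

/* Illustrative only: a reduced symbol's span runs from its first
   component's start to its last component's end.                    */
struct span
{
  int first_line, first_column;
  int last_line, last_column;
};

static struct span
merge (struct span first, struct span last)
{
  struct span s = first;
  s.last_line = last.last_line;
  s.last_column = last.last_column;
  return s;
}

int
main (void)
{
  struct span lhs = {1, 1, 1, 7};    /* exp "(# + 1)" at 1.1-7   */
  struct span rhs = {1, 11, 1, 14};  /* number 1111 at 1.11-14   */
  struct span all = merge (lhs, rhs);
  printf ("%d.%d-%d\n", all.first_line, all.first_column, all.last_column);
  return 0;
}

As the traces show, the printed form drops the second line number when both ends fall on the same line (1.1-14) and collapses to a single position when the span covers one column (1.9).
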
@@ -63774,37 +67937,19 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 
-  | +1
-./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1363: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
 stderr:
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -63814,133 +67959,83 @@
 Entering state 4
 Stack now 0 4
 Return for a new token:
+1.2: syntax error: invalid character: '#'
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
 Return for a new token:
 Reading a token
 Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
 Return for a new token:
 Reading a token
 Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
 Return for a new token:
 Reading a token
 Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 126):
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
+   $2 = token error (1.2-6: )
    $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Return for a new token:
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Return for a new token:
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Return for a new token:
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Return for a new token:
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
 Return for a new token:
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 8 23 4 12 20 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Return for a new token:
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Return for a new token:
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 115):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -63958,91 +68053,6 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-./calc.at:1358: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-./calc.at:1358: cat stderr
-./calc.at:1360:  $PREPARSER ./calc  /dev/null
-488. calc.at:1358:  ok
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
-Stack now 0
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
-Stack now 0
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -64360,16 +68370,24 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1364:
-    if "$POSIXLY_CORRECT_IS_EXPORTED"; then
-      sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y
-    else
-      mv calc.y.tmp calc.y
-    fi
-
 
+input:
+./calc.at:1360: cat stderr
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1363:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -64688,2388 +68706,8 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Stack now 0 4 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Stack now 0 4 5 16
-Reducing stack by rule 16 (line 129):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Stack now 0 4
-Shifting token error (1.2-3: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.9: 1)
-Shifting token number (1.9: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token number (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.11: 2)
-Error: discarding token number (1.11: 2)
-Error: popping token error (1.9-11: )
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Stack now 0 4 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Stack now 0 4 5 16
-Reducing stack by rule 16 (line 129):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Stack now 0 4
-Shifting token error (1.2-3: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.9: 1)
-Shifting token number (1.9: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token number (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.11: 2)
-Error: discarding token number (1.11: 2)
-Error: popping token error (1.9-11: )
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1360:  $PREPARSER ./calc  input
-./calc.at:1364: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 128):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 128):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 21 4
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 21 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 21 4
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 21 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 131):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 131):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 132):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 132):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.8: )
-Stack now 0 8 21 4
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.8: )
-Stack now 0 8 21 4
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1360: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-stdout:
-./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1360: "$PERL" -pi -e 'use strict;
+./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -67079,189 +68717,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1362: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-./calc.at:1360: cat stderr
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1362:  $PREPARSER ./calc  input
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1360:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 115):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
@@ -67271,12 +68736,12 @@
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
 Entering state 30
 Stack now 0 8 21 30
@@ -67286,18 +68751,18 @@
 Entering state 22
 Stack now 0 8 21 30 22
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
 Entering state 1
 Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Stack now 0 8 21 30 22 31
 Reading a token
 Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 101):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
@@ -67305,7 +68770,7 @@
 Entering state 30
 Stack now 0 8 21 30
 Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 99):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
@@ -67317,18 +68782,18 @@
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.13: 7)
-Shifting token number (1.13: 7)
+Next token is token "number" (1.13: 7)
+Shifting token "number" (1.13: 7)
 Entering state 1
 Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13: 7)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.13: 7)
 -> $$ = nterm exp (1.13: 7)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
 Next token is token '\n' (1.14-2.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (1.1-9: 7)
    $2 = token '=' (1.11: )
    $3 = nterm exp (1.13: 7)
@@ -67339,24 +68804,24 @@
 Shifting token '\n' (1.14-2.0: )
 Entering state 25
 Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (1.1-13: 7)
    $2 = token '\n' (1.14-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
-Reducing stack by rule 1 (line 91):
+Reducing stack by rule 1 (line 78):
    $1 = nterm line (1.1-2.0: )
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token number (2.1: 1)
-Shifting token number (2.1: 1)
+Next token is token "number" (2.1: 1)
+Shifting token "number" (2.1: 1)
 Entering state 1
 Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.1: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (2.1: 1)
 -> $$ = nterm exp (2.1: 1)
 Entering state 8
 Stack now 0 6 8
@@ -67366,12 +68831,12 @@
 Entering state 21
 Stack now 0 6 8 21
 Reading a token
-Next token is token number (2.5: 2)
-Shifting token number (2.5: 2)
+Next token is token "number" (2.5: 2)
+Shifting token "number" (2.5: 2)
 Entering state 1
 Stack now 0 6 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.5: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (2.5: 2)
 -> $$ = nterm exp (2.5: 2)
 Entering state 30
 Stack now 0 6 8 21 30
@@ -67386,25 +68851,25 @@
 Entering state 2
 Stack now 0 6 8 21 30 22 2
 Reading a token
-Next token is token number (2.10: 3)
-Shifting token number (2.10: 3)
+Next token is token "number" (2.10: 3)
+Shifting token "number" (2.10: 3)
 Entering state 1
 Stack now 0 6 8 21 30 22 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.10: 3)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (2.10: 3)
 -> $$ = nterm exp (2.10: 3)
 Entering state 10
 Stack now 0 6 8 21 30 22 2 10
 Reading a token
 Next token is token '=' (2.12: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (2.9: )
    $2 = nterm exp (2.10: 3)
 -> $$ = nterm exp (2.9-10: -3)
 Entering state 31
 Stack now 0 6 8 21 30 22 31
 Next token is token '=' (2.12: )
-Reducing stack by rule 9 (line 114):
+Reducing stack by rule 9 (line 101):
    $1 = nterm exp (2.5: 2)
    $2 = token '*' (2.7: )
    $3 = nterm exp (2.9-10: -3)
@@ -67412,7 +68877,7 @@
 Entering state 30
 Stack now 0 6 8 21 30
 Next token is token '=' (2.12: )
-Reducing stack by rule 7 (line 112):
+Reducing stack by rule 7 (line 99):
    $1 = nterm exp (2.1: 1)
    $2 = token '+' (2.3: )
    $3 = nterm exp (2.5-10: -6)
@@ -67429,25 +68894,25 @@
 Entering state 2
 Stack now 0 6 8 19 2
 Reading a token
-Next token is token number (2.15: 5)
-Shifting token number (2.15: 5)
+Next token is token "number" (2.15: 5)
+Shifting token "number" (2.15: 5)
 Entering state 1
 Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.15: 5)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (2.15: 5)
 -> $$ = nterm exp (2.15: 5)
 Entering state 10
 Stack now 0 6 8 19 2 10
 Reading a token
 Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (2.14: )
    $2 = nterm exp (2.15: 5)
 -> $$ = nterm exp (2.14-15: -5)
 Entering state 28
 Stack now 0 6 8 19 28
 Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (2.1-10: -5)
    $2 = token '=' (2.12: )
    $3 = nterm exp (2.14-15: -5)
@@ -67458,13 +68923,13 @@
 Shifting token '\n' (2.16-3.0: )
 Entering state 25
 Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (2.1-15: -5)
    $2 = token '\n' (2.16-3.0: )
 -> $$ = nterm line (2.1-3.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-2.0: )
    $2 = nterm line (2.1-3.0: )
 -> $$ = nterm input (1.1-3.0: )
@@ -67475,12 +68940,12 @@
 Shifting token '\n' (3.1-4.0: )
 Entering state 3
 Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
+Reducing stack by rule 3 (line 83):
    $1 = token '\n' (3.1-4.0: )
 -> $$ = nterm line (3.1-4.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-3.0: )
    $2 = nterm line (3.1-4.0: )
 -> $$ = nterm input (1.1-4.0: )
@@ -67492,12 +68957,12 @@
 Entering state 2
 Stack now 0 6 2
 Reading a token
-Next token is token number (4.2: 1)
-Shifting token number (4.2: 1)
+Next token is token "number" (4.2: 1)
+Shifting token "number" (4.2: 1)
 Entering state 1
 Stack now 0 6 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.2: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (4.2: 1)
 -> $$ = nterm exp (4.2: 1)
 Entering state 10
 Stack now 0 6 2 10
@@ -67507,18 +68972,18 @@
 Entering state 24
 Stack now 0 6 2 10 24
 Reading a token
-Next token is token number (4.4: 2)
-Shifting token number (4.4: 2)
+Next token is token "number" (4.4: 2)
+Shifting token "number" (4.4: 2)
 Entering state 1
 Stack now 0 6 2 10 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.4: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (4.4: 2)
 -> $$ = nterm exp (4.4: 2)
 Entering state 33
 Stack now 0 6 2 10 24 33
 Reading a token
 Next token is token '=' (4.6: )
-Reducing stack by rule 12 (line 125):
+Reducing stack by rule 12 (line 112):
    $1 = nterm exp (4.2: 1)
    $2 = token '^' (4.3: )
    $3 = nterm exp (4.4: 2)
@@ -67526,7 +68991,7 @@
 Entering state 10
 Stack now 0 6 2 10
 Next token is token '=' (4.6: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (4.1: )
    $2 = nterm exp (4.2-4: 1)
 -> $$ = nterm exp (4.1-4: -1)
@@ -67542,25 +69007,25 @@
 Entering state 2
 Stack now 0 6 8 19 2
 Reading a token
-Next token is token number (4.9: 1)
-Shifting token number (4.9: 1)
+Next token is token "number" (4.9: 1)
+Shifting token "number" (4.9: 1)
 Entering state 1
 Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.9: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (4.9: 1)
 -> $$ = nterm exp (4.9: 1)
 Entering state 10
 Stack now 0 6 8 19 2 10
 Reading a token
 Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (4.8: )
    $2 = nterm exp (4.9: 1)
 -> $$ = nterm exp (4.8-9: -1)
 Entering state 28
 Stack now 0 6 8 19 28
 Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (4.1-4: -1)
    $2 = token '=' (4.6: )
    $3 = nterm exp (4.8-9: -1)
@@ -67571,13 +69036,13 @@
 Shifting token '\n' (4.10-5.0: )
 Entering state 25
 Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (4.1-9: -1)
    $2 = token '\n' (4.10-5.0: )
 -> $$ = nterm line (4.1-5.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-4.0: )
    $2 = nterm line (4.1-5.0: )
 -> $$ = nterm input (1.1-5.0: )
@@ -67594,18 +69059,18 @@
 Entering state 2
 Stack now 0 6 4 2
 Reading a token
-Next token is token number (5.3: 1)
-Shifting token number (5.3: 1)
+Next token is token "number" (5.3: 1)
+Shifting token "number" (5.3: 1)
 Entering state 1
 Stack now 0 6 4 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.3: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (5.3: 1)
 -> $$ = nterm exp (5.3: 1)
 Entering state 10
 Stack now 0 6 4 2 10
 Reading a token
 Next token is token ')' (5.4: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (5.2: )
    $2 = nterm exp (5.3: 1)
 -> $$ = nterm exp (5.2-3: -1)
@@ -67615,7 +69080,7 @@
 Shifting token ')' (5.4: )
 Entering state 27
 Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 126):
+Reducing stack by rule 13 (line 113):
    $1 = token '(' (5.1: )
    $2 = nterm exp (5.2-3: -1)
    $3 = token ')' (5.4: )
@@ -67628,18 +69093,18 @@
 Entering state 24
 Stack now 0 6 8 24
 Reading a token
-Next token is token number (5.6: 2)
-Shifting token number (5.6: 2)
+Next token is token "number" (5.6: 2)
+Shifting token "number" (5.6: 2)
 Entering state 1
 Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.6: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (5.6: 2)
 -> $$ = nterm exp (5.6: 2)
 Entering state 33
 Stack now 0 6 8 24 33
 Reading a token
 Next token is token '=' (5.8: )
-Reducing stack by rule 12 (line 125):
+Reducing stack by rule 12 (line 112):
    $1 = nterm exp (5.1-4: -1)
    $2 = token '^' (5.5: )
    $3 = nterm exp (5.6: 2)
@@ -67651,18 +69116,18 @@
 Entering state 19
 Stack now 0 6 8 19
 Reading a token
-Next token is token number (5.10: 1)
-Shifting token number (5.10: 1)
+Next token is token "number" (5.10: 1)
+Shifting token "number" (5.10: 1)
 Entering state 1
 Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.10: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (5.10: 1)
 -> $$ = nterm exp (5.10: 1)
 Entering state 28
 Stack now 0 6 8 19 28
 Reading a token
 Next token is token '\n' (5.11-6.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (5.1-6: 1)
    $2 = token '=' (5.8: )
    $3 = nterm exp (5.10: 1)
@@ -67673,13 +69138,13 @@
 Shifting token '\n' (5.11-6.0: )
 Entering state 25
 Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (5.1-10: 1)
    $2 = token '\n' (5.11-6.0: )
 -> $$ = nterm line (5.1-6.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-5.0: )
    $2 = nterm line (5.1-6.0: )
 -> $$ = nterm input (1.1-6.0: )
@@ -67690,12 +69155,12 @@
 Shifting token '\n' (6.1-7.0: )
 Entering state 3
 Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
+Reducing stack by rule 3 (line 83):
    $1 = token '\n' (6.1-7.0: )
 -> $$ = nterm line (6.1-7.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-6.0: )
    $2 = nterm line (6.1-7.0: )
 -> $$ = nterm input (1.1-7.0: )
@@ -67717,32 +69182,32 @@
 Entering state 2
 Stack now 0 6 2 2 2
 Reading a token
-Next token is token number (7.4: 1)
-Shifting token number (7.4: 1)
+Next token is token "number" (7.4: 1)
+Shifting token "number" (7.4: 1)
 Entering state 1
 Stack now 0 6 2 2 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (7.4: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (7.4: 1)
 -> $$ = nterm exp (7.4: 1)
 Entering state 10
 Stack now 0 6 2 2 2 10
 Reading a token
 Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (7.3: )
    $2 = nterm exp (7.4: 1)
 -> $$ = nterm exp (7.3-4: -1)
 Entering state 10
 Stack now 0 6 2 2 10
 Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (7.2: )
    $2 = nterm exp (7.3-4: -1)
 -> $$ = nterm exp (7.2-4: 1)
 Entering state 10
 Stack now 0 6 2 10
 Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (7.1: )
    $2 = nterm exp (7.2-4: 1)
 -> $$ = nterm exp (7.1-4: -1)
@@ -67758,25 +69223,25 @@
 Entering state 2
 Stack now 0 6 8 19 2
 Reading a token
-Next token is token number (7.9: 1)
-Shifting token number (7.9: 1)
+Next token is token "number" (7.9: 1)
+Shifting token "number" (7.9: 1)
 Entering state 1
 Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (7.9: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (7.9: 1)
 -> $$ = nterm exp (7.9: 1)
 Entering state 10
 Stack now 0 6 8 19 2 10
 Reading a token
 Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (7.8: )
    $2 = nterm exp (7.9: 1)
 -> $$ = nterm exp (7.8-9: -1)
 Entering state 28
 Stack now 0 6 8 19 28
 Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (7.1-4: -1)
    $2 = token '=' (7.6: )
    $3 = nterm exp (7.8-9: -1)
@@ -67787,13 +69252,13 @@
 Shifting token '\n' (7.10-8.0: )
 Entering state 25
 Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (7.1-9: -1)
    $2 = token '\n' (7.10-8.0: )
 -> $$ = nterm line (7.1-8.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-7.0: )
    $2 = nterm line (7.1-8.0: )
 -> $$ = nterm input (1.1-8.0: )
@@ -67804,24 +69269,24 @@
 Shifting token '\n' (8.1-9.0: )
 Entering state 3
 Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
+Reducing stack by rule 3 (line 83):
    $1 = token '\n' (8.1-9.0: )
 -> $$ = nterm line (8.1-9.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-8.0: )
    $2 = nterm line (8.1-9.0: )
 -> $$ = nterm input (1.1-9.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token number (9.1: 1)
-Shifting token number (9.1: 1)
+Next token is token "number" (9.1: 1)
+Shifting token "number" (9.1: 1)
 Entering state 1
 Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.1: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (9.1: 1)
 -> $$ = nterm exp (9.1: 1)
 Entering state 8
 Stack now 0 6 8
@@ -67831,18 +69296,18 @@
 Entering state 20
 Stack now 0 6 8 20
 Reading a token
-Next token is token number (9.5: 2)
-Shifting token number (9.5: 2)
+Next token is token "number" (9.5: 2)
+Shifting token "number" (9.5: 2)
 Entering state 1
 Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.5: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (9.5: 2)
 -> $$ = nterm exp (9.5: 2)
 Entering state 29
 Stack now 0 6 8 20 29
 Reading a token
 Next token is token '-' (9.7: )
-Reducing stack by rule 8 (line 113):
+Reducing stack by rule 8 (line 100):
    $1 = nterm exp (9.1: 1)
    $2 = token '-' (9.3: )
    $3 = nterm exp (9.5: 2)
@@ -67854,18 +69319,18 @@
 Entering state 20
 Stack now 0 6 8 20
 Reading a token
-Next token is token number (9.9: 3)
-Shifting token number (9.9: 3)
+Next token is token "number" (9.9: 3)
+Shifting token "number" (9.9: 3)
 Entering state 1
 Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.9: 3)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (9.9: 3)
 -> $$ = nterm exp (9.9: 3)
 Entering state 29
 Stack now 0 6 8 20 29
 Reading a token
 Next token is token '=' (9.11: )
-Reducing stack by rule 8 (line 113):
+Reducing stack by rule 8 (line 100):
    $1 = nterm exp (9.1-5: -1)
    $2 = token '-' (9.7: )
    $3 = nterm exp (9.9: 3)
@@ -67882,25 +69347,25 @@
 Entering state 2
 Stack now 0 6 8 19 2
 Reading a token
-Next token is token number (9.14: 4)
-Shifting token number (9.14: 4)
+Next token is token "number" (9.14: 4)
+Shifting token "number" (9.14: 4)
 Entering state 1
 Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.14: 4)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (9.14: 4)
 -> $$ = nterm exp (9.14: 4)
 Entering state 10
 Stack now 0 6 8 19 2 10
 Reading a token
 Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 11 (line 124):
+Reducing stack by rule 11 (line 111):
    $1 = token '-' (9.13: )
    $2 = nterm exp (9.14: 4)
 -> $$ = nterm exp (9.13-14: -4)
 Entering state 28
 Stack now 0 6 8 19 28
 Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (9.1-9: -4)
    $2 = token '=' (9.11: )
    $3 = nterm exp (9.13-14: -4)
@@ -67911,25 +69376,25 @@
 Shifting token '\n' (9.15-10.0: )
 Entering state 25
 Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (9.1-14: -4)
    $2 = token '\n' (9.15-10.0: )
 -> $$ = nterm line (9.1-10.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-9.0: )
    $2 = nterm line (9.1-10.0: )
 -> $$ = nterm input (1.1-10.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token number (10.1: 1)
-Shifting token number (10.1: 1)
+Next token is token "number" (10.1: 1)
+Shifting token "number" (10.1: 1)
 Entering state 1
 Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.1: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (10.1: 1)
 -> $$ = nterm exp (10.1: 1)
 Entering state 8
 Stack now 0 6 8
@@ -67944,12 +69409,12 @@
 Entering state 4
 Stack now 0 6 8 20 4
 Reading a token
-Next token is token number (10.6: 2)
-Shifting token number (10.6: 2)
+Next token is token "number" (10.6: 2)
+Shifting token "number" (10.6: 2)
 Entering state 1
 Stack now 0 6 8 20 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.6: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (10.6: 2)
 -> $$ = nterm exp (10.6: 2)
 Entering state 12
 Stack now 0 6 8 20 4 12
@@ -67959,18 +69424,18 @@
 Entering state 20
 Stack now 0 6 8 20 4 12 20
 Reading a token
-Next token is token number (10.10: 3)
-Shifting token number (10.10: 3)
+Next token is token "number" (10.10: 3)
+Shifting token "number" (10.10: 3)
 Entering state 1
 Stack now 0 6 8 20 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.10: 3)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (10.10: 3)
 -> $$ = nterm exp (10.10: 3)
 Entering state 29
 Stack now 0 6 8 20 4 12 20 29
 Reading a token
 Next token is token ')' (10.11: )
-Reducing stack by rule 8 (line 113):
+Reducing stack by rule 8 (line 100):
    $1 = nterm exp (10.6: 2)
    $2 = token '-' (10.8: )
    $3 = nterm exp (10.10: 3)
@@ -67981,7 +69446,7 @@
 Shifting token ')' (10.11: )
 Entering state 27
 Stack now 0 6 8 20 4 12 27
-Reducing stack by rule 13 (line 126):
+Reducing stack by rule 13 (line 113):
    $1 = token '(' (10.5: )
    $2 = nterm exp (10.6-10: -1)
    $3 = token ')' (10.11: )
@@ -67990,7 +69455,7 @@
 Stack now 0 6 8 20 29
 Reading a token
 Next token is token '=' (10.13: )
-Reducing stack by rule 8 (line 113):
+Reducing stack by rule 8 (line 100):
    $1 = nterm exp (10.1: 1)
    $2 = token '-' (10.3: )
    $3 = nterm exp (10.5-11: -1)
@@ -68002,18 +69467,18 @@
 Entering state 19
 Stack now 0 6 8 19
 Reading a token
-Next token is token number (10.15: 2)
-Shifting token number (10.15: 2)
+Next token is token "number" (10.15: 2)
+Shifting token "number" (10.15: 2)
 Entering state 1
 Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.15: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (10.15: 2)
 -> $$ = nterm exp (10.15: 2)
 Entering state 28
 Stack now 0 6 8 19 28
 Reading a token
 Next token is token '\n' (10.16-11.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (10.1-11: 2)
    $2 = token '=' (10.13: )
    $3 = nterm exp (10.15: 2)
@@ -68024,13 +69489,13 @@
 Shifting token '\n' (10.16-11.0: )
 Entering state 25
 Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (10.1-15: 2)
    $2 = token '\n' (10.16-11.0: )
 -> $$ = nterm line (10.1-11.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-10.0: )
    $2 = nterm line (10.1-11.0: )
 -> $$ = nterm input (1.1-11.0: )
@@ -68041,24 +69506,24 @@
 Shifting token '\n' (11.1-12.0: )
 Entering state 3
 Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
+Reducing stack by rule 3 (line 83):
    $1 = token '\n' (11.1-12.0: )
 -> $$ = nterm line (11.1-12.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-11.0: )
    $2 = nterm line (11.1-12.0: )
 -> $$ = nterm input (1.1-12.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token number (12.1: 2)
-Shifting token number (12.1: 2)
+Next token is token "number" (12.1: 2)
+Shifting token "number" (12.1: 2)
 Entering state 1
 Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.1: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (12.1: 2)
 -> $$ = nterm exp (12.1: 2)
 Entering state 8
 Stack now 0 6 8
@@ -68068,12 +69533,12 @@
 Entering state 24
 Stack now 0 6 8 24
 Reading a token
-Next token is token number (12.3: 2)
-Shifting token number (12.3: 2)
+Next token is token "number" (12.3: 2)
+Shifting token "number" (12.3: 2)
 Entering state 1
 Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.3: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (12.3: 2)
 -> $$ = nterm exp (12.3: 2)
 Entering state 33
 Stack now 0 6 8 24 33
@@ -68083,18 +69548,18 @@
 Entering state 24
 Stack now 0 6 8 24 33 24
 Reading a token
-Next token is token number (12.5: 3)
-Shifting token number (12.5: 3)
+Next token is token "number" (12.5: 3)
+Shifting token "number" (12.5: 3)
 Entering state 1
 Stack now 0 6 8 24 33 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.5: 3)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (12.5: 3)
 -> $$ = nterm exp (12.5: 3)
 Entering state 33
 Stack now 0 6 8 24 33 24 33
 Reading a token
 Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 125):
+Reducing stack by rule 12 (line 112):
    $1 = nterm exp (12.3: 2)
    $2 = token '^' (12.4: )
    $3 = nterm exp (12.5: 3)
@@ -68102,7 +69567,7 @@
 Entering state 33
 Stack now 0 6 8 24 33
 Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 125):
+Reducing stack by rule 12 (line 112):
    $1 = nterm exp (12.1: 2)
    $2 = token '^' (12.2: )
    $3 = nterm exp (12.3-5: 8)
@@ -68114,18 +69579,18 @@
 Entering state 19
 Stack now 0 6 8 19
 Reading a token
-Next token is token number (12.9-11: 256)
-Shifting token number (12.9-11: 256)
+Next token is token "number" (12.9-11: 256)
+Shifting token "number" (12.9-11: 256)
 Entering state 1
 Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.9-11: 256)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (12.9-11: 256)
 -> $$ = nterm exp (12.9-11: 256)
 Entering state 28
 Stack now 0 6 8 19 28
 Reading a token
 Next token is token '\n' (12.12-13.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (12.1-5: 256)
    $2 = token '=' (12.7: )
    $3 = nterm exp (12.9-11: 256)
@@ -68136,13 +69601,13 @@
 Shifting token '\n' (12.12-13.0: )
 Entering state 25
 Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (12.1-11: 256)
    $2 = token '\n' (12.12-13.0: )
 -> $$ = nterm line (12.1-13.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-12.0: )
    $2 = nterm line (12.1-13.0: )
 -> $$ = nterm input (1.1-13.0: )
@@ -68154,12 +69619,12 @@
 Entering state 4
 Stack now 0 6 4
 Reading a token
-Next token is token number (13.2: 2)
-Shifting token number (13.2: 2)
+Next token is token "number" (13.2: 2)
+Shifting token "number" (13.2: 2)
 Entering state 1
 Stack now 0 6 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.2: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (13.2: 2)
 -> $$ = nterm exp (13.2: 2)
 Entering state 12
 Stack now 0 6 4 12
@@ -68169,18 +69634,18 @@
 Entering state 24
 Stack now 0 6 4 12 24
 Reading a token
-Next token is token number (13.4: 2)
-Shifting token number (13.4: 2)
+Next token is token "number" (13.4: 2)
+Shifting token "number" (13.4: 2)
 Entering state 1
 Stack now 0 6 4 12 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.4: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (13.4: 2)
 -> $$ = nterm exp (13.4: 2)
 Entering state 33
 Stack now 0 6 4 12 24 33
 Reading a token
 Next token is token ')' (13.5: )
-Reducing stack by rule 12 (line 125):
+Reducing stack by rule 12 (line 112):
    $1 = nterm exp (13.2: 2)
    $2 = token '^' (13.3: )
    $3 = nterm exp (13.4: 2)
@@ -68191,7 +69656,7 @@
 Shifting token ')' (13.5: )
 Entering state 27
 Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 126):
+Reducing stack by rule 13 (line 113):
    $1 = token '(' (13.1: )
    $2 = nterm exp (13.2-4: 4)
    $3 = token ')' (13.5: )
@@ -68204,18 +69669,18 @@
 Entering state 24
 Stack now 0 6 8 24
 Reading a token
-Next token is token number (13.7: 3)
-Shifting token number (13.7: 3)
+Next token is token "number" (13.7: 3)
+Shifting token "number" (13.7: 3)
 Entering state 1
 Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.7: 3)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (13.7: 3)
 -> $$ = nterm exp (13.7: 3)
 Entering state 33
 Stack now 0 6 8 24 33
 Reading a token
 Next token is token '=' (13.9: )
-Reducing stack by rule 12 (line 125):
+Reducing stack by rule 12 (line 112):
    $1 = nterm exp (13.1-5: 4)
    $2 = token '^' (13.6: )
    $3 = nterm exp (13.7: 3)
@@ -68227,18 +69692,18 @@
 Entering state 19
 Stack now 0 6 8 19
 Reading a token
-Next token is token number (13.11-12: 64)
-Shifting token number (13.11-12: 64)
+Next token is token "number" (13.11-12: 64)
+Shifting token "number" (13.11-12: 64)
 Entering state 1
 Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.11-12: 64)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (13.11-12: 64)
 -> $$ = nterm exp (13.11-12: 64)
 Entering state 28
 Stack now 0 6 8 19 28
 Reading a token
 Next token is token '\n' (13.13-14.0: )
-Reducing stack by rule 6 (line 102):
+Reducing stack by rule 6 (line 89):
    $1 = nterm exp (13.1-7: 64)
    $2 = token '=' (13.9: )
    $3 = nterm exp (13.11-12: 64)
@@ -68249,13 +69714,13 @@
 Shifting token '\n' (13.13-14.0: )
 Entering state 25
 Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
+Reducing stack by rule 4 (line 84):
    $1 = nterm exp (13.1-12: 64)
    $2 = token '\n' (13.13-14.0: )
 -> $$ = nterm line (13.1-14.0: )
 Entering state 18
 Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
+Reducing stack by rule 2 (line 79):
    $1 = nterm input (1.1-13.0: )
    $2 = nterm line (13.1-14.0: )
 -> $$ = nterm input (1.1-14.0: )
@@ -68263,1159 +69728,133 @@
 Stack now 0 6
 Reading a token
 Now at end of input.
-Shifting token end of file (14.1: )
+Shifting token "end of input" (14.1: )
 Entering state 17
 Stack now 0 6 17
 Stack now 0 6 17
-Cleanup: popping token end of file (14.1: )
+Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+input:
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (#) + (#) = 2222
+./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1358: cat stderr
 stderr:
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+1.8: syntax error: invalid character: '#'
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.8: )
+Stack now 0 8 21 4
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
 Entering state 30
 Stack now 0 8 21 30
+Reading a token
 Next token is token '=' (1.11: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
 Stack now 0 8
 Next token is token '=' (1.11: )
 Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.13: 7)
-Shifting token number (1.13: 7)
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13: 7)
--> $$ = nterm exp (1.13: 7)
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.14-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1.1-9: 2222)
    $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (2.1: 1)
-Shifting token number (2.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 21
-Stack now 0 6 8 21
-Reading a token
-Next token is token number (2.5: 2)
-Shifting token number (2.5: 2)
-Entering state 1
-Stack now 0 6 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 30
-Stack now 0 6 8 21 30
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 22
-Stack now 0 6 8 21 30 22
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Stack now 0 6 8 21 30 22 2
-Reading a token
-Next token is token number (2.10: 3)
-Shifting token number (2.10: 3)
-Entering state 1
-Stack now 0 6 8 21 30 22 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Stack now 0 6 8 21 30 22 2 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 31
-Stack now 0 6 8 21 30 22 31
-Next token is token '=' (2.12: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
-Entering state 30
-Stack now 0 6 8 21 30
-Next token is token '=' (2.12: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (2.15: 5)
-Shifting token number (2.15: 5)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token number (4.2: 1)
-Shifting token number (4.2: 1)
-Entering state 1
-Stack now 0 6 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Stack now 0 6 2 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 24
-Stack now 0 6 2 10 24
-Reading a token
-Next token is token number (4.4: 2)
-Shifting token number (4.4: 2)
-Entering state 1
-Stack now 0 6 2 10 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 33
-Stack now 0 6 2 10 24 33
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (4.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (4.9: 1)
-Shifting token number (4.9: 1)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
-Entering state 4
-Stack now 0 6 4
-Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Stack now 0 6 4 2
-Reading a token
-Next token is token number (5.3: 1)
-Shifting token number (5.3: 1)
-Entering state 1
-Stack now 0 6 4 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Stack now 0 6 4 2 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 27
-Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 24
-Stack now 0 6 8 24
-Reading a token
-Next token is token number (5.6: 2)
-Shifting token number (5.6: 2)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 33
-Stack now 0 6 8 24 33
-Reading a token
-Next token is token '=' (5.8: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (5.10: 1)
-Shifting token number (5.10: 1)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Stack now 0 6 2 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Stack now 0 6 2 2 2
-Reading a token
-Next token is token number (7.4: 1)
-Shifting token number (7.4: 1)
-Entering state 1
-Stack now 0 6 2 2 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Stack now 0 6 2 2 2 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Stack now 0 6 2 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (7.9: 1)
-Shifting token number (7.9: 1)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (9.1: 1)
-Shifting token number (9.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.1: 1)
--> $$ = nterm exp (9.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token number (9.5: 2)
-Shifting token number (9.5: 2)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '-' (9.7: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token number (9.9: 3)
-Shifting token number (9.9: 3)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '=' (9.11: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token number (9.14: 4)
-Shifting token number (9.14: 4)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 11 (line 124):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (10.1: 1)
-Shifting token number (10.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.1: 1)
--> $$ = nterm exp (10.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
-Entering state 4
-Stack now 0 6 8 20 4
-Reading a token
-Next token is token number (10.6: 2)
-Shifting token number (10.6: 2)
-Entering state 1
-Stack now 0 6 8 20 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
-Stack now 0 6 8 20 4 12
-Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 20
-Stack now 0 6 8 20 4 12 20
-Reading a token
-Next token is token number (10.10: 3)
-Shifting token number (10.10: 3)
-Entering state 1
-Stack now 0 6 8 20 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 29
-Stack now 0 6 8 20 4 12 20 29
-Reading a token
-Next token is token ')' (10.11: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
-Entering state 12
-Stack now 0 6 8 20 4 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 27
-Stack now 0 6 8 20 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '=' (10.13: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (10.15: 2)
-Shifting token number (10.15: 2)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (10.15: 2)
--> $$ = nterm exp (10.15: 2)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (10.16-11.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (12.1: 2)
-Shifting token number (12.1: 2)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.1: 2)
--> $$ = nterm exp (12.1: 2)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 24
-Stack now 0 6 8 24
-Reading a token
-Next token is token number (12.3: 2)
-Shifting token number (12.3: 2)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 33
-Stack now 0 6 8 24 33
-Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 24
-Stack now 0 6 8 24 33 24
-Reading a token
-Next token is token number (12.5: 3)
-Shifting token number (12.5: 3)
-Entering state 1
-Stack now 0 6 8 24 33 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 33
-Stack now 0 6 8 24 33 24 33
-Reading a token
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 33
-Stack now 0 6 8 24 33
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (12.9-11: 256)
-Shifting token number (12.9-11: 256)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (12.12-13.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
-Entering state 4
-Stack now 0 6 4
-Reading a token
-Next token is token number (13.2: 2)
-Shifting token number (13.2: 2)
-Entering state 1
-Stack now 0 6 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.2: 2)
--> $$ = nterm exp (13.2: 2)
-Entering state 12
-Stack now 0 6 4 12
-Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 24
-Stack now 0 6 4 12 24
-Reading a token
-Next token is token number (13.4: 2)
-Shifting token number (13.4: 2)
-Entering state 1
-Stack now 0 6 4 12 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 33
-Stack now 0 6 4 12 24 33
-Reading a token
-Next token is token ')' (13.5: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 27
-Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 24
-Stack now 0 6 8 24
-Reading a token
-Next token is token number (13.7: 3)
-Shifting token number (13.7: 3)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 33
-Stack now 0 6 8 24 33
-Reading a token
-Next token is token '=' (13.9: )
-Reducing stack by rule 12 (line 125):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (13.11-12: 64)
-Shifting token number (13.11-12: 64)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (13.13-14.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 92):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (14.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 115):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -69433,104 +69872,10 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-stdout:
-input:
-./calc.at:1360: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1 2
-./calc.at:1362:  $PREPARSER ./calc  input
-./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-stderr:
-./calc.at:1360: cat stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
-Stack now 0
-489. calc.at:1360:  ok
-./calc.at:1363: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
-Stack now 0
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1363:  $PREPARSER ./calc  input
-
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + # + 1) = 1111
+./calc.at:1358:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -70548,1034 +70893,388 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1362: cat stderr
+./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+493. calc.at:1367: testing Calculator parse.error=custom  ...
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Return for a new token:
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Return for a new token:
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
-Stack now 0 8 21
+Stack now 0 4 12 21
+Return for a new token:
+1.6: syntax error: invalid character: '#'
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Return for a new token:
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Return for a new token:
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Return for a new token:
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
+Return for a new token:
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
 Entering state 19
 Stack now 0 8 19
+Return for a new token:
 Reading a token
-Next token is token "number" (1.13: 7)
-Shifting token "number" (1.13: 7)
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
 Entering state 1
 Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.13: 7)
--> $$ = nterm exp (1.13: 7)
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
 Entering state 28
 Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 25
 Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
-Reducing stack by rule 1 (line 78):
+Reducing stack by rule 1 (line 91):
    $1 = nterm line (1.1-2.0: )
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
-Next token is token "number" (2.1: 1)
-Shifting token "number" (2.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 21
-Stack now 0 6 8 21
-Reading a token
-Next token is token "number" (2.5: 2)
-Shifting token "number" (2.5: 2)
-Entering state 1
-Stack now 0 6 8 21 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 30
-Stack now 0 6 8 21 30
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 22
-Stack now 0 6 8 21 30 22
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Stack now 0 6 8 21 30 22 2
-Reading a token
-Next token is token "number" (2.10: 3)
-Shifting token "number" (2.10: 3)
-Entering state 1
-Stack now 0 6 8 21 30 22 2 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Stack now 0 6 8 21 30 22 2 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 31
-Stack now 0 6 8 21 30 22 31
-Next token is token '=' (2.12: )
-Reducing stack by rule 9 (line 101):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
-Entering state 30
-Stack now 0 6 8 21 30
-Next token is token '=' (2.12: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token "number" (2.15: 5)
-Shifting token "number" (2.15: 5)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 83):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token "number" (4.2: 1)
-Shifting token "number" (4.2: 1)
-Entering state 1
-Stack now 0 6 2 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Stack now 0 6 2 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 24
-Stack now 0 6 2 10 24
-Reading a token
-Next token is token "number" (4.4: 2)
-Shifting token "number" (4.4: 2)
-Entering state 1
-Stack now 0 6 2 10 24 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 33
-Stack now 0 6 2 10 24 33
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack by rule 12 (line 112):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (4.6: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token "number" (4.9: 1)
-Shifting token "number" (4.9: 1)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
-Stack now 0 6
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1362: cat stderr
+./calc.at:1367: mv calc.y.tmp calc.y
+
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
-Stack now 0 6 4
-Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Stack now 0 6 4 2
-Reading a token
-Next token is token "number" (5.3: 1)
-Shifting token "number" (5.3: 1)
-Entering state 1
-Stack now 0 6 4 2 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Stack now 0 6 4 2 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 27
-Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 113):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 24
-Stack now 0 6 8 24
-Reading a token
-Next token is token "number" (5.6: 2)
-Shifting token "number" (5.6: 2)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 33
-Stack now 0 6 8 24 33
-Reading a token
-Next token is token '=' (5.8: )
-Reducing stack by rule 12 (line 112):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token "number" (5.10: 1)
-Shifting token "number" (5.10: 1)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 83):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Stack now 0 6 2 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Stack now 0 6 2 2 2
-Reading a token
-Next token is token "number" (7.4: 1)
-Shifting token "number" (7.4: 1)
-Entering state 1
-Stack now 0 6 2 2 2 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Stack now 0 6 2 2 2 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Stack now 0 6 2 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token "number" (7.9: 1)
-Shifting token "number" (7.9: 1)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Stack now 0 6 8 19 2 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 83):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token "number" (9.1: 1)
-Shifting token "number" (9.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (9.1: 1)
--> $$ = nterm exp (9.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token "number" (9.5: 2)
-Shifting token "number" (9.5: 2)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '-' (9.7: )
-Reducing stack by rule 8 (line 100):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token "number" (9.9: 3)
-Shifting token "number" (9.9: 3)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '=' (9.11: )
-Reducing stack by rule 8 (line 100):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
-Stack now 0 6 8 19 2
-Reading a token
-Next token is token "number" (9.14: 4)
-Shifting token "number" (9.14: 4)
-Entering state 1
-Stack now 0 6 8 19 2 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
-Stack now 0 6 8 19 2 10
+Stack now 0 4
 Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 11 (line 111):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
-Entering state 28
-Stack now 0 6 8 19 28
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
-Entering state 6
-Stack now 0 6
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token "number" (10.1: 1)
-Shifting token "number" (10.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (10.1: 1)
--> $$ = nterm exp (10.1: 1)
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
 Entering state 8
-Stack now 0 6 8
+Stack now 0 8
 Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 20
-Stack now 0 6 8 20
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
 Entering state 4
-Stack now 0 6 8 20 4
-Reading a token
-Next token is token "number" (10.6: 2)
-Shifting token "number" (10.6: 2)
-Entering state 1
-Stack now 0 6 8 20 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
-Stack now 0 6 8 20 4 12
-Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 20
-Stack now 0 6 8 20 4 12 20
+Stack now 0 8 21 4
 Reading a token
-Next token is token "number" (10.10: 3)
-Shifting token "number" (10.10: 3)
-Entering state 1
-Stack now 0 6 8 20 4 12 20 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 29
-Stack now 0 6 8 20 4 12 20 29
+1.8: syntax error: invalid character: '#'
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.8: )
+Stack now 0 8 21 4
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
 Reading a token
-Next token is token ')' (10.11: )
-Reducing stack by rule 8 (line 100):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
-Entering state 12
-Stack now 0 6 8 20 4 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 27
-Stack now 0 6 8 20 4 12 27
-Reducing stack by rule 13 (line 113):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 29
-Stack now 0 6 8 20 29
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 30
+Stack now 0 8 21 30
 Reading a token
-Next token is token '=' (10.13: )
-Reducing stack by rule 8 (line 100):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
 Entering state 8
-Stack now 0 6 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
+Stack now 0 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
-Stack now 0 6 8 19
+Stack now 0 8 19
 Reading a token
-Next token is token "number" (10.15: 2)
-Shifting token "number" (10.15: 2)
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
 Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (10.15: 2)
--> $$ = nterm exp (10.15: 2)
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
 Entering state 28
-Stack now 0 6 8 19 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (10.16-11.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 83):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token "number" (12.1: 2)
-Shifting token "number" (12.1: 2)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (12.1: 2)
--> $$ = nterm exp (12.1: 2)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 24
-Stack now 0 6 8 24
-Reading a token
-Next token is token "number" (12.3: 2)
-Shifting token "number" (12.3: 2)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 33
-Stack now 0 6 8 24 33
-Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 24
-Stack now 0 6 8 24 33 24
-Reading a token
-Next token is token "number" (12.5: 3)
-Shifting token "number" (12.5: 3)
-Entering state 1
-Stack now 0 6 8 24 33 24 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 33
-Stack now 0 6 8 24 33 24 33
-Reading a token
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 112):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 33
-Stack now 0 6 8 24 33
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 112):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token "number" (12.9-11: 256)
-Shifting token "number" (12.9-11: 256)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '\n' (12.12-13.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
-Stack now 0 6
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+input:
+stderr:
+  | 1 2
+./calc.at:1363:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
-Stack now 0 6 4
+Stack now 0 4
+Return for a new token:
 Reading a token
-Next token is token "number" (13.2: 2)
-Shifting token "number" (13.2: 2)
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 6 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (13.2: 2)
--> $$ = nterm exp (13.2: 2)
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
-Stack now 0 6 4 12
-Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 24
-Stack now 0 6 4 12 24
+Stack now 0 4 12
+Return for a new token:
 Reading a token
-Next token is token "number" (13.4: 2)
-Shifting token "number" (13.4: 2)
-Entering state 1
-Stack now 0 6 4 12 24 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 33
-Stack now 0 6 4 12 24 33
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Return for a new token:
+1.6: syntax error: invalid character: '#'
 Reading a token
-Next token is token ')' (13.5: )
-Reducing stack by rule 12 (line 112):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 27
-Stack now 0 6 4 12 27
-Reducing stack by rule 13 (line 113):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
-Entering state 8
-Stack now 0 6 8
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Return for a new token:
 Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 24
-Stack now 0 6 8 24
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Return for a new token:
 Reading a token
-Next token is token "number" (13.7: 3)
-Shifting token "number" (13.7: 3)
-Entering state 1
-Stack now 0 6 8 24 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 33
-Stack now 0 6 8 24 33
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Return for a new token:
 Reading a token
-Next token is token '=' (13.9: )
-Reducing stack by rule 12 (line 112):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
-Stack now 0 6 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
+Stack now 0 8
+Return for a new token:
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
 Entering state 19
-Stack now 0 6 8 19
+Stack now 0 8 19
+Return for a new token:
 Reading a token
-Next token is token "number" (13.11-12: 64)
-Shifting token "number" (13.11-12: 64)
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
 Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
 Entering state 28
-Stack now 0 6 8 19 28
+Stack now 0 8 19 28
+Return for a new token:
 Reading a token
-Next token is token '\n' (13.13-14.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 25
-Stack now 0 6 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 18
-Stack now 0 6 18
-Reducing stack by rule 2 (line 79):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
+Return for a new token:
 Reading a token
 Now at end of input.
-Shifting token "end of input" (14.1: )
+Shifting token end of file (2.1: )
 Entering state 17
 Stack now 0 6 17
 Stack now 0 6 17
-Cleanup: popping token "end of input" (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-input:
-  | 1//2
-./calc.at:1362:  $PREPARSER ./calc  input
-  | 1 2
-stderr:
-./calc.at:1363:  $PREPARSER ./calc  input
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 Starting parse
 Entering state 0
@@ -71597,100 +71296,162 @@
 Stack now 0
 Cleanup: discarding lookahead token "number" (1.3: 2)
 Stack now 0
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1362:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Stack now 0 4 5
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Stack now 0 4 5 16
+Reducing stack by rule 16 (line 129):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Stack now 0 4
+Shifting token error (1.2-3: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token "number" (1.3: 2)
-Stack now 0
-493. calc.at:1367: testing Calculator parse.error=custom  ...
-stderr:
-./calc.at:1367: mv calc.y.tmp calc.y
-
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token number (1.9: 1)
+Shifting token number (1.9: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+   $1 = token number (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token number (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.11: 2)
+Error: discarding token number (1.11: 2)
+Error: popping token error (1.9-11: )
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
 Entering state 8
 Stack now 0 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1363: cat stderr
-./calc.at:1362: "$PERL" -pi -e 'use strict;
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -71700,12 +71461,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1362: cat stderr
-input:
-  | 1//2
-./calc.at:1363:  $PREPARSER ./calc  input
+./calc.at:1360: cat stderr
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -71720,165 +71478,154 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
+Cleanup: discarding lookahead token "number" (1.3: 2)
 Stack now 0
+./calc.at:1358: cat stderr
 stderr:
-  | error
-./calc.at:1362:  $PREPARSER ./calc  input
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-stderr:
-./types.at:139:  $PREPARSER ./test
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Stack now 0 4 5
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Stack now 0 8 23
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Stack now 0 4 5 16
+Reducing stack by rule 16 (line 129):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Stack now 0 4
+Shifting token error (1.2-3: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-======== Testing with C++ standard flags: ''
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1363: cat stderr
-input:
-./calc.at:1367: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-  | error
-./calc.at:1362: cat stderr
-./calc.at:1363:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
 Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-Stack now 0
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1362:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
+Stack now 0 8 21
 Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-Stack now 0
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Stack now 0 8 21 4
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token number (1.9: 1)
+Shifting token number (1.9: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+   $1 = token number (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token number (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token number (1.11: 2)
+Error: discarding token number (1.11: 2)
+Error: popping token error (1.9-11: )
+Stack now 0 8 21 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
 Entering state 8
 Stack now 0 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1363: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -71889,50 +71636,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./calc.at:1363: cat stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
 input:
-  | 1 = 2 = 3
-./calc.at:1363:  $PREPARSER ./calc  input
+input:
 ./calc.at:1362: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -71943,113 +71648,250 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | (1 + 1) / (1 - 1)
+./calc.at:1358:  $PREPARSER ./calc  input
+./calc.at:1362: cat stderr
 stderr:
+  | (1 + #) = 1111
+./calc.at:1360:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Return for a new token:
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Return for a new token:
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Return for a new token:
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Return for a new token:
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Stack now 0 8 19
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Return for a new token:
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Return for a new token:
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
 Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Stack now 0 8 19 28
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Return for a new token:
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Return for a new token:
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Return for a new token:
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Return for a new token:
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 115):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-./calc.at:1362: cat stderr
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Return for a new token:
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
+./calc.at:1363: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
 Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 19
-Error: popping token '=' (1.3: )
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-  | 
-  | +1
-./calc.at:1362:  $PREPARSER ./calc  input
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (1.1-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -72059,177 +71901,174 @@
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1363: cat stderr
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 96):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Return for a new token:
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-input:
-  | 
-  | +1
-./calc.at:1363:  $PREPARSER ./calc  input
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Return for a new token:
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-./calc.at:1362: cat stderr
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1362:  $PREPARSER ./calc  /dev/null
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Return for a new token:
 Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
-Stack now 0
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Return for a new token:
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Return for a new token:
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Return for a new token:
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Return for a new token:
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Return for a new token:
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Return for a new token:
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Return for a new token:
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 115):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
-Reducing stack by rule 1 (line 78):
+Reducing stack by rule 1 (line 91):
    $1 = nterm line (1.1-2.0: )
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Stack now 0 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file (1.1: )
-Stack now 0
-./calc.at:1363: cat stderr
-./calc.at:1363:  $PREPARSER ./calc  /dev/null
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1362: cat stderr
-Starting parse
-Entering state 0
-Stack now 0
+Return for a new token:
 Reading a token
 Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-Stack now 0
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 input:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-Stack now 0
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (- *) + (1 2) = 1
 ./calc.at:1362:  $PREPARSER ./calc  input
+input:
 stderr:
-stderr:
-stdout:
-./calc.at:1363: "$PERL" -pi -e 'use strict;
+./calc.at:1358: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -72239,7 +72078,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./types.at:139: ./check
 Starting parse
 Entering state 0
 Stack now 0
@@ -72249,298 +72087,230 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.7: 1)
-Shifting token number (1.7: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Stack now 0 8 21 4 12 21
-Reading a token
-Next token is token number (1.11: 1)
-Shifting token number (1.11: 1)
-Entering state 1
-Stack now 0 8 21 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21 4 12 21
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (1.15: 1)
-Shifting token number (1.15: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 8 21 4 12 21 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Stack now 0 8 21 4 12 21
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Stack now 0 8 21 4 12
-Error: popping nterm exp (1.7-15: 3)
-Stack now 0 8 21 4
-Shifting token error (1.7-18: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 30
-Stack now 0 8 21 30
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '+' (1.20: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 21
-Stack now 0 8 21
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+  | 1//2
+./calc.at:1363:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
-Stack now 0 8 21 4
+Stack now 0 4
 Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Error: popping token error (1.23: )
-Stack now 0 8 21 4
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
 Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Error: popping token error (1.23: )
-Stack now 0 8 21 4
-Shifting token error (1.23-25: )
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 128):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
 Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Error: popping token error (1.23-25: )
-Stack now 0 8 21 4
-Shifting token error (1.23-27: )
+Stack now 0 4 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
 Entering state 11
-Stack now 0 8 21 4 11
+Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 26
-Stack now 0 8 21 4 11 26
+Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Stack now 0 8 21 4
 Reading a token
-Next token is token number (1.33: 1)
-Shifting token number (1.33: 1)
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
 Entering state 1
 Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 22
-Stack now 0 8 21 4 12 22
-Reading a token
-Next token is token number (1.37: 2)
-Shifting token number (1.37: 2)
-Entering state 1
-Stack now 0 8 21 4 12 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 31
-Stack now 0 8 21 4 12 22 31
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
 Entering state 12
 Stack now 0 8 21 4 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
-Stack now 0 8 21 4 12 22
 Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Stack now 0 8 21 4 12
-Error: popping nterm exp (1.33-37: 2)
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
 Stack now 0 8 21 4
-Shifting token error (1.33-41: )
+Shifting token error (1.10-12: )
 Entering state 11
 Stack now 0 8 21 4 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Error: popping token error (1.33-41: )
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
 Stack now 0 8 21 4
-Shifting token error (1.33-41: )
+Shifting token error (1.10-12: )
 Entering state 11
 Stack now 0 8 21 4 11
 Reading a token
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 26
 Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
 Entering state 30
 Stack now 0 8 21 30
 Reading a token
-Next token is token '=' (1.44: )
+Next token is token '=' (1.15: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.46: 1)
-Shifting token number (1.46: 1)
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.46: 1)
--> $$ = nterm exp (1.46: 1)
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.47-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -72557,10 +72327,50 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o test.cc test.y
-./calc.at:1363: cat stderr
+./calc.at:1367: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1358: cat stderr
 ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1360: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
 stderr:
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -72570,298 +72380,2259 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 128):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Stack now 0 8 21 4
 Reading a token
-Next token is token number (1.7: 1)
-Shifting token number (1.7: 1)
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
 Entering state 1
 Stack now 0 8 21 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Stack now 0 8 21 4 12 21
-Reading a token
-Next token is token number (1.11: 1)
-Shifting token number (1.11: 1)
-Entering state 1
-Stack now 0 8 21 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21 4 12 21
-Reading a token
-Next token is token number (1.15: 1)
-Shifting token number (1.15: 1)
-Entering state 1
-Stack now 0 8 21 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
 Entering state 12
 Stack now 0 8 21 4 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Stack now 0 8 21 4 12 21
 Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Stack now 0 8 21 4 12
-Error: popping nterm exp (1.7-15: 3)
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
 Stack now 0 8 21 4
-Shifting token error (1.7-18: )
+Shifting token error (1.10-12: )
 Entering state 11
 Stack now 0 8 21 4 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
+Stack now 0 8 21 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 26
 Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
 Entering state 30
 Stack now 0 8 21 30
 Reading a token
-Next token is token '+' (1.20: )
+Next token is token '=' (1.15: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+488. calc.at:1358:  ok
+input:
+  | (# + 1) = 1111
+./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+stderr:
+./calc.at:1362: cat stderr
+
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (* *) + (*) + (*)
+./calc.at:1362:  $PREPARSER ./calc  input
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+./calc.at:1363: cat stderr
+input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Stack now 0 8 21 4
 Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
 Entering state 11
 Stack now 0 8 21 4 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Error: popping token error (1.23: )
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
 Stack now 0 8 21 4
-Shifting token error (1.23: )
+Shifting token error (1.10: )
 Entering state 11
 Stack now 0 8 21 4 11
 Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Error: popping token error (1.23: )
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
 Stack now 0 8 21 4
-Shifting token error (1.23-25: )
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 21 4
+Shifting token error (1.16: )
 Entering state 11
 Stack now 0 8 21 4 11
 Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Error: popping token error (1.23-25: )
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | error
+./calc.at:1363:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+Stack now 0
+494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc}  ...
+./calc.at:1360: cat stderr
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1368: mv calc.y.tmp calc.y
+
+stderr:
+./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
 Stack now 0 8 21 4
-Shifting token error (1.23-27: )
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
+Stack now 0 8 21 4
+Shifting token error (1.10: )
 Entering state 11
 Stack now 0 8 21 4 11
 Reading a token
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
 Entering state 26
 Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
 Entering state 30
 Stack now 0 8 21 30
 Reading a token
-Next token is token '+' (1.30: )
+Next token is token '+' (1.13: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
 Entering state 4
 Stack now 0 8 21 4
 Reading a token
-Next token is token number (1.33: 1)
-Shifting token number (1.33: 1)
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 21 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1360:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.33: 1)
--> $$ = nterm exp (1.33: 1)
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
-Stack now 0 8 21 4 12
+Stack now 0 4 12
 Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1362: cat stderr
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1363: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1362:  $PREPARSER ./calc  input
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1368: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
 Entering state 22
-Stack now 0 8 21 4 12 22
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token number (1.37: 2)
-Shifting token number (1.37: 2)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
 Entering state 1
-Stack now 0 8 21 4 12 22 1
+Stack now 0 8 21 30 22 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.37: 2)
--> $$ = nterm exp (1.37: 2)
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
 Entering state 31
-Stack now 0 8 21 4 12 22 31
+Stack now 0 8 21 30 22 31
 Reading a token
-Next token is token '*' (1.39: )
+Next token is token '+' (1.11: )
 Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 130):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+input:
+./calc.at:1360: cat stderr
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1363:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 21 5 14
+Reducing stack by rule 17 (line 130):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1360:  $PREPARSER ./calc  input
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
-Stack now 0 8 21 4 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 115):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+  | 1 + 2 * 3 + !- ++
+stderr:
+./calc.at:1362:  $PREPARSER ./calc  input
+./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 19
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
 Entering state 22
-Stack now 0 8 21 4 12 22
+Stack now 0 8 21 30 22
 Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Stack now 0 8 21 4 12
-Error: popping nterm exp (1.33-37: 2)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 115):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 21 5 13
+Reducing stack by rule 18 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1363: cat stderr
+./calc.at:1360: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1360: cat stderr
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 
+  | +1
+./calc.at:1363:  $PREPARSER ./calc  input
+./calc.at:1362: cat stderr
+stderr:
+489. calc.at:1360:  ok
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1362:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 132):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 114):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 132):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1363: cat stderr
+./calc.at:1363:  $PREPARSER ./calc  /dev/null
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+Stack now 0
+./calc.at:1362: cat stderr
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+Stack now 0
+./calc.at:1369: mv calc.y.tmp calc.y
+
+input:
+./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+  | (#) + (#) = 2222
+./calc.at:1362:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
 Stack now 0 8 21 4
-Shifting token error (1.33-41: )
+Reading a token
+1.8: syntax error: invalid character: '#'
+Shifting token error (1.8: )
 Entering state 11
 Stack now 0 8 21 4 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Error: popping token error (1.33-41: )
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.8: )
 Stack now 0 8 21 4
-Shifting token error (1.33-41: )
+Shifting token error (1.8: )
 Entering state 11
 Stack now 0 8 21 4 11
 Reading a token
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
 Entering state 26
 Stack now 0 8 21 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
 Entering state 30
 Stack now 0 8 21 30
 Reading a token
-Next token is token '=' (1.44: )
+Next token is token '=' (1.11: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.46: 1)
-Shifting token number (1.46: 1)
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.46: 1)
--> $$ = nterm exp (1.46: 1)
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.47-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.8: )
+Stack now 0 8 21 4
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 127):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -72878,10 +74649,11 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1363: cat stderr
+./calc.at:1369: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1363:  $PREPARSER ./calc  input
-stderr:
 ./calc.at:1362: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -72892,6 +74664,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 ./calc.at:1362: cat stderr
 Starting parse
 Entering state 0
@@ -73211,10 +74984,9 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
 stderr:
-  | (!!) + (1 2) = 1
-./calc.at:1362:  $PREPARSER ./calc  input
+stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -73533,6 +75305,13 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
+  | (1 + #) = 1111
+./calc.at:1362:  $PREPARSER ./calc  input
+stdout:
+stdout:
+stderr:
+./types.at:139:  $PREPARSER ./test
+./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 Starting parse
 Entering state 0
 Stack now 0
@@ -73542,121 +75321,79 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Stack now 0 4 5
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Stack now 0 4 5 16
-Reducing stack by rule 16 (line 129):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
 Stack now 0 4
-Shifting token error (1.2-3: )
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.9: 1)
-Shifting token number (1.9: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token number (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.11: 2)
-Error: discarding token number (1.11: 2)
-Error: popping token error (1.9-11: )
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -73673,7 +75410,49 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1367: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
+./calc.at:1363: cat stderr
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1367:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+451. types.at:139:  ok
 stderr:
 Starting parse
 Entering state 0
@@ -73684,121 +75463,79 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Stack now 0 4 5
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Stack now 0 4 5 16
-Reducing stack by rule 16 (line 129):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
 Stack now 0 4
-Shifting token error (1.2-3: )
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.9: 1)
-Shifting token number (1.9: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token number (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.11: 2)
-Error: discarding token number (1.11: 2)
-Error: popping token error (1.9-11: )
-Stack now 0 8 21 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -73815,34 +75552,12 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1363: cat stderr
-./calc.at:1362: cat stderr
+stderr:
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1363:  $PREPARSER ./calc  input
-./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
-input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -73983,158 +75698,25 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-  | (- *) + (1 2) = 1
-./calc.at:1362:  $PREPARSER ./calc  input
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+
+input:
+  | 1 2
+./calc.at:1367:  $PREPARSER ./calc  input
 stderr:
 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 128):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Stack now 0 8 21 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 21 4 12
-Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1362: cat stderr
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -74276,7 +75858,11 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+input:
+  | (# + 1) = 1111
+./calc.at:1362:  $PREPARSER ./calc  input
 stderr:
 ./calc.at:1363: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -74297,129 +75883,180 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 128):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
 Stack now 0 4
 Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
 Error: popping token error (1.2-4: )
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 26
 Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 21 4 12
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full  ...
+./calc.at:1370: mv calc.y.tmp calc.y
+
+./calc.at:1363: cat stderr
+./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1367: cat stderr
+input:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
 Entering state 11
-Stack now 0 8 21 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
-Stack now 0 8 21 4
-Shifting token error (1.10-12: )
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
 Entering state 11
-Stack now 0 8 21 4 11
+Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 26
-Stack now 0 8 21 4 11 26
+Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Stack now 0 8 19
 Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -74436,8 +76073,15 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: cat stderr
+  | (- *) + (1 2) = 1
+./calc.at:1363:  $PREPARSER ./calc  input
 input:
+  | 1//2
+./calc.at:1367:  $PREPARSER ./calc  input
+stderr:
+stderr:
+syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1362: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -74448,9 +76092,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (- *) + (1 2) = 1
-./calc.at:1363:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -74599,10 +76240,11 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1362: cat stderr
+stderr:
+syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1362: cat stderr
 stderr:
-input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -74751,9 +76393,26 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-  | (* *) + (*) + (*)
+stderr:
+./calc.at:1367: cat stderr
+./calc.at:1370: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+input:
+stdout:
+  | (1 + # + 1) = 1111
 ./calc.at:1362:  $PREPARSER ./calc  input
+./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -74763,132 +76422,95 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
 Stack now 0 4
-Shifting token error (1.2: )
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2-8: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 21 4
-Shifting token error (1.10: )
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
 Entering state 11
-Stack now 0 8 21 4 11
+Stack now 0 4 11
 Reading a token
 Next token is token ')' (1.11: )
 Shifting token ')' (1.11: )
 Entering state 26
-Stack now 0 8 21 4 11 26
+Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
    $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Stack now 0 8 21 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 21 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -74905,19 +76527,40 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+  | error
+./calc.at:1367:  $PREPARSER ./calc  input
+./calc.at:1368: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
 ./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./calc.at:1363: cat stderr
+input:
+stderr:
+stderr:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1368:  $PREPARSER ./calc  input
+syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -74927,132 +76570,95 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
 Stack now 0 4
-Shifting token error (1.2: )
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2-8: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 21 4
-Shifting token error (1.10: )
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
 Entering state 11
-Stack now 0 8 21 4 11
+Stack now 0 4 11
 Reading a token
 Next token is token ')' (1.11: )
 Shifting token ')' (1.11: )
 Entering state 26
-Stack now 0 8 21 4 11 26
+Stack now 0 4 11 26
 Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
    $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Stack now 0 8 21 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 21 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 21 4 11
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Stack now 0 8 21 30
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 102):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 25
 Stack now 0 8 25
 Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -75069,6 +76675,14 @@
 Stack now 0 6 17
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+input:
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+  | (* *) + (*) + (*)
+./calc.at:1363:  $PREPARSER ./calc  input
+syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+stderr:
 ./calc.at:1362: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -75079,12 +76693,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (* *) + (*) + (*)
-./calc.at:1362: cat stderr
-./calc.at:1363:  $PREPARSER ./calc  input
 stderr:
-input:
+./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -75236,90 +76846,12 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1362:  $PREPARSER ./calc  input
+input:
 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+  | 1 2
+./calc.at:1368:  $PREPARSER ./calc  input
+./calc.at:1362: cat stderr
+./calc.at:1367: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -75472,190 +77004,329 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1362:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
+./calc.at:1367:  $PREPARSER ./calc  input
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+stderr:
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
-Stack now 0 8 21
+Stack now 0 4 12 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
 Entering state 1
-Stack now 0 8 21 1
+Stack now 0 4 12 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
 Entering state 30
-Stack now 0 8 21 30
+Stack now 0 4 12 21 30
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 112):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
 Entering state 1
-Stack now 0 8 21 30 22 1
+Stack now 0 8 23 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 115):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 21 5 14
-Reducing stack by rule 17 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1363: cat stderr
-  | 1 + 2 * 3 + !- ++
-./calc.at:1362:  $PREPARSER ./calc  input
+./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1368: cat stderr
+stderr:
+syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
 stderr:
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1363:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
-Stack now 0 8 21
+Stack now 0 4 12 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
 Entering state 1
-Stack now 0 8 21 1
+Stack now 0 4 12 21 1
 Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
 Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
+Stack now 0 4 12 21 30
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
+Next token is token ')' (1.7: )
 Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
 Reading a token
 Next token is token '-' (1.14: )
 Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 131):
-   $1 = token '!' (1.13: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 101):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 113):
+   $1 = nterm exp (1.12: 1)
    $2 = token '-' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 126):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 115):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 97):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 91):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token end of file (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token end of file (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1363:  $PREPARSER ./calc  input
+input:
 stderr:
+  | 1//2
+./calc.at:1368:  $PREPARSER ./calc  input
 stderr:
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -75736,88 +77407,22 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1367: cat stderr
+./calc.at:1362: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 21 5 13
-Reducing stack by rule 18 (line 131):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
 stderr:
+./calc.at:1362: cat stderr
+stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -75899,20 +77504,19 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+490. calc.at:1362:  ok
+  | 
+  | +1
+./calc.at:1367:  $PREPARSER ./calc  input
 input:
+./calc.at:1368: cat stderr
   | 1 + 2 * 3 + !- ++
 ./calc.at:1363:  $PREPARSER ./calc  input
-./calc.at:1362: cat stderr
+stderr:
+syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -75994,9 +77598,18 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+  | error
+./calc.at:1368:  $PREPARSER ./calc  input
+stderr:
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
+stdout:
 Starting parse
 Entering state 0
 Stack now 0
@@ -76077,19 +77690,71 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-  | 1 + 2 * 3 + !* ++
-./calc.at:1362:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1367: cat stderr
+./calc.at:1367:  $PREPARSER ./calc  /dev/null
+./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+stderr:
+syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+stderr:
+./calc.at:1364: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1368: cat stderr
+./calc.at:1364:  $PREPARSER ./calc  input
+497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full  ...
+./calc.at:1371: mv calc.y.tmp calc.y
+
+./calc.at:1367: cat stderr
+./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1363: cat stderr
+stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
@@ -76099,12 +77764,12 @@
 Entering state 21
 Stack now 0 8 21
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
 Entering state 30
 Stack now 0 8 21 30
@@ -76114,2873 +77779,39 @@
 Entering state 22
 Stack now 0 8 21 30 22
 Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
 Entering state 1
 Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Stack now 0 8 21 30 22 31
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
+Next token is token '=' (1.11: )
+Reducing stack by rule 9 (line 101):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 99):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
 -> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 132):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1363: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
+Stack now 0 8 19
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 114):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 132):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1363:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 119):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1362: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 21 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Stack now 0 8 21 5 15
-Reducing stack by rule 19 (line 119):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Stack now 0 8 21
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | (#) + (#) = 2222
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1362:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.8: )
-Stack now 0 8 21 4
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: cat stderr
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.8: )
-Stack now 0 8 21 4
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-  | (#) + (#) = 2222
-./calc.at:1363:  $PREPARSER ./calc  input
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.2: )
-Error: discarding token "invalid token" (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token "invalid token" (1.8: )
-Error: discarding token "invalid token" (1.8: )
-Error: popping token error (1.8: )
-Stack now 0 8 21 4
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1362: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.2: )
-Error: discarding token "invalid token" (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Next token is token "invalid token" (1.8: )
-Error: discarding token "invalid token" (1.8: )
-Error: popping token error (1.8: )
-Stack now 0 8 21 4
-Shifting token error (1.8: )
-Entering state 11
-Stack now 0 8 21 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Stack now 0 8 21 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | (1 + #) = 1111
-./calc.at:1362:  $PREPARSER ./calc  input
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1363: cat stderr
-stderr:
-input:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-  | (1 + #) = 1111
-./calc.at:1363:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.6: )
-Error: discarding token "invalid token" (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1362: cat stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.6: )
-Error: discarding token "invalid token" (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | (# + 1) = 1111
-./calc.at:1362:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: cat stderr
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1362: cat stderr
-  | (# + 1) = 1111
-./calc.at:1363:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.2: )
-Error: discarding token "invalid token" (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1362:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.2: )
-Error: discarding token "invalid token" (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Error: popping token error (1.2-4: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 127):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 102):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: cat stderr
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1363:  $PREPARSER ./calc  input
-./calc.at:1362: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.6: )
-Error: discarding token "invalid token" (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1362:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token "invalid token" (1.6: )
-Error: discarding token "invalid token" (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 19 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Stack now 0 8 19 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 115):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 112):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 101):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 113):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 126):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 115):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 97):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 91):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token end of file (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token end of file (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-./calc.at:1363: cat stderr
-stdout:
-./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1362: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1364: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1362: cat stderr
-./calc.at:1363:  $PREPARSER ./calc  input
-stderr:
-490. calc.at:1362: Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 113):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 100):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 113):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 102):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-input:
- ok
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1364:  $PREPARSER ./calc  input
-stderr:
-stdout:
-./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 4 12 21
-Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Stack now 0 4 12 21 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Stack now 0 4 12 21 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Stack now 0 4 12 27
-Reducing stack by rule 13 (line 113):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Stack now 0 8 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 23 4
-Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Stack now 0 8 23 4 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 23 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Stack now 0 8 23 4 12 20
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Stack now 0 8 23 4 12 20 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Stack now 0 8 23 4 12 20 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 100):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 23 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Stack now 0 8 23 4 12 27
-Reducing stack by rule 13 (line 113):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Stack now 0 8 23 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 102):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Stack now 0 8 25
-Reducing stack by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Stack now 0 6 17
-Stack now 0 6 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Stack now 0 8 21 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Stack now 0 8 21 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Stack now 0 8 21 30 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Stack now 0 8 21 30 22 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Stack now 0 8 21 30 22 31
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Stack now 0 8 21 30
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Stack now 0 8 19
-Reading a token
-Next token is token "number" (1.13: 7)
-Shifting token "number" (1.13: 7)
+Next token is token "number" (1.13: 7)
+Shifting token "number" (1.13: 7)
 Entering state 1
 Stack now 0 8 19 1
 Reducing stack by rule 5 (line 88):
@@ -79931,19 +78762,17 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1367: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1368:  $PREPARSER ./calc  input
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1363:  $PREPARSER ./calc  input
+stderr:
+input:
 stderr:
 Starting parse
 Entering state 0
@@ -80963,39 +79792,193 @@
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1367:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 119):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 stderr:
-input:
-./calc.at:1363: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+error: 4444 != 1
 ./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+stderr:
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+error: 4444 != 1
   | 1 2
 ./calc.at:1364:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Stack now 0 8 21 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Stack now 0 8 21 30 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Stack now 0 8 21 30 22 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Stack now 0 8 21 30 22 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Stack now 0 8 21 30
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 21 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Stack now 0 8 21 5 15
+Reducing stack by rule 19 (line 119):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Stack now 0 8 21
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1368: cat stderr
 stderr:
-stderr:
-./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1371: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 Starting parse
 Entering state 0
 Stack now 0
@@ -81016,10 +79999,37 @@
 Stack now 0
 Cleanup: discarding lookahead token "number" (1.3: 2)
 Stack now 0
-./calc.at:1363: cat stderr
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stdout:
+./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+./calc.at:1363: cat stderr
+  | 
+  | +1
+./calc.at:1368:  $PREPARSER ./calc  input
 stderr:
-491. calc.at:1363:  ok
+./calc.at:1369: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
 Starting parse
 Entering state 0
 Stack now 0
@@ -81040,7 +80050,34 @@
 Stack now 0
 Cleanup: discarding lookahead token "number" (1.3: 2)
 Stack now 0
+./calc.at:1367: cat stderr
+stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1369:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1363:  $PREPARSER ./calc  input
+stderr:
 input:
+  | (!!) + (1 2) = 1
+stderr:
+stderr:
+./calc.at:1367:  $PREPARSER ./calc  input
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -81051,23 +80088,285 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 2
-./calc.at:1367:  $PREPARSER ./calc  input
-./calc.at:1364: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.2: )
+Error: discarding token "invalid token" (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token "invalid token" (1.8: )
+Error: discarding token "invalid token" (1.8: )
+Error: popping token error (1.8: )
+Stack now 0 8 21 4
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 99):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 89):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+error: 2222 != 1
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
 stderr:
-494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc}  ...
-./calc.at:1368: mv calc.y.tmp calc.y
-
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1364: cat stderr
+stderr:
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+error: 2222 != 1
+./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
+./calc.at:1368: cat stderr
   | 1//2
 ./calc.at:1364:  $PREPARSER ./calc  input
-./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
-
+input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.2: )
+Error: discarding token "invalid token" (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
+Next token is token "invalid token" (1.8: )
+Error: discarding token "invalid token" (1.8: )
+Error: popping token error (1.8: )
+Stack now 0 8 21 4
+Shifting token error (1.8: )
+Entering state 11
+Stack now 0 8 21 4 11
+Reading a token
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Stack now 0 8 21 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 30
+Stack now 0 8 21 30
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 99):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 89):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+  | 1 2
+./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1368:  $PREPARSER ./calc  /dev/null
+./calc.at:1367: cat stderr
+stderr:
+stderr:
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -81095,10 +80394,13 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
+./calc.at:1363: cat stderr
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1367: cat stderr
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
-input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -81126,11 +80428,117 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
-  | 1//2
-./calc.at:1367:  $PREPARSER ./calc  input
 stderr:
-syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+  | (1 + #) = 1111
+./calc.at:1363:  $PREPARSER ./calc  input
+./calc.at:1368: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.6: )
+Error: discarding token "invalid token" (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+./calc.at:1369: cat stderr
+  | (- *) + (1 2) = 1
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1367:  $PREPARSER ./calc  input
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -81141,20 +80549,154 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+input:
+stderr:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+  | 1//2
+./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1368:  $PREPARSER ./calc  input
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+error: 2222 != 1
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 stderr:
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 ./calc.at:1364: cat stderr
-syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1369: mv calc.y.tmp calc.y
-
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.6: )
+Error: discarding token "invalid token" (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+error: 2222 != 1
+stderr:
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
 input:
-./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
   | error
 ./calc.at:1364:  $PREPARSER ./calc  input
-./calc.at:1367: cat stderr
 stderr:
 stderr:
-stdout:
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1368: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -81163,10 +80705,30 @@
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token "invalid token" (1.1: )
 Stack now 0
-./types.at:139:  $PREPARSER ./test
+./calc.at:1367: cat stderr
+./calc.at:1369: cat stderr
+./calc.at:1363: cat stderr
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1368: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1368:  $PREPARSER ./calc  input
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
+  | (* *) + (*) + (*)
+./calc.at:1367:  $PREPARSER ./calc  input
+stderr:
+  | (# + 1) = 1111
+stderr:
+./calc.at:1363:  $PREPARSER ./calc  input
+stderr:
 stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -81175,9 +80737,108 @@
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token "invalid token" (1.1: )
 Stack now 0
-stderr:
-input:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.2: )
+Error: discarding token "invalid token" (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -81188,20 +80849,114 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | error
-449. types.at:139:  ok
-./calc.at:1367:  $PREPARSER ./calc  input
+./calc.at:1368: cat stderr
 stderr:
-syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1364: cat stderr
 stderr:
-syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-
+stderr:
+./calc.at:1364: cat stderr
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.2: )
+Error: discarding token "invalid token" (1.2: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.2: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 input:
-./calc.at:1367: cat stderr
   | 1 = 2 = 3
 ./calc.at:1364:  $PREPARSER ./calc  input
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -81243,11 +80998,32 @@
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
 input:
-  | 1 = 2 = 3
-./calc.at:1367:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1368:  $PREPARSER ./calc  input
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+stderr:
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1367: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -81287,93 +81063,15 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-./calc.at:1364: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1369: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-./calc.at:1364: cat stderr
-./calc.at:1367: cat stderr
-496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full  ...
-./calc.at:1370: mv calc.y.tmp calc.y
-
-input:
-  | 
-  | +1
-./calc.at:1364:  $PREPARSER ./calc  input
-./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1363: cat stderr
 input:
-  | 
-  | +1
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1367:  $PREPARSER ./calc  input
+./calc.at:1369: cat stderr
 stderr:
-stderr:
-syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
+./calc.at:1368: cat stderr
+input:
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -81384,61 +81082,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1367: cat stderr
-./calc.at:1367:  $PREPARSER ./calc  /dev/null
-./calc.at:1364: cat stderr
-stderr:
-./calc.at:1364:  $PREPARSER ./calc  /dev/null
-stderr:
-syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-Stack now 0
-./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+  | (1 + # + 1) = 1111
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-Stack now 0
-./calc.at:1364: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1367: cat stderr
-./calc.at:1370: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-./calc.at:1364: cat stderr
+./calc.at:1363:  $PREPARSER ./calc  input
 input:
+./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+  | (* *) + (*) + (*)
+./calc.at:1368:  $PREPARSER ./calc  input
+./calc.at:1364: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1364:  $PREPARSER ./calc  input
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1367:  $PREPARSER ./calc  input
 stderr:
 stderr:
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-error: 4444 != 1
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -81448,57 +81102,812 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Stack now 0 4 11 26
-Reducing stack by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Stack now 0 8 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Stack now 0 8 21 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
-Stack now 0 8 21 4 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
-Stack now 0 8 21 4 12
+Stack now 0 4 12
 Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
-Stack now 0 8 21 4 12 21
-Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
-Entering state 1
-Stack now 0 8 21 4 12 21 1
-Reducing stack by rule 5 (line 88):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Stack now 0 8 21 4 12 21 30
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.6: )
+Error: discarding token "invalid token" (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 89):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+  | 1 = 2 = 3
+./calc.at:1369:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1367:  $PREPARSER ./calc  input
+stderr:
+stderr:
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token "invalid token" (1.6: )
+Error: discarding token "invalid token" (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Stack now 0 8 19
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 19 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Stack now 0 8 19 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 89):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 
+  | +1
+./calc.at:1364:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+./calc.at:1369: cat stderr
+./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+./calc.at:1368: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+input:
+./calc.at:1364: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 
+  | +1
+./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1363: cat stderr
+stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1367: cat stderr
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1368:  $PREPARSER ./calc  input
+stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1364: cat stderr
+input:
+stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1363:  $PREPARSER ./calc  input
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1364:  $PREPARSER ./calc  /dev/null
+stderr:
+./calc.at:1369: cat stderr
+stderr:
+input:
+./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+  | 1 + 2 * 3 + !* ++
+stderr:
+./calc.at:1367:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 99):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 113):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 100):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 113):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 102):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1369:  $PREPARSER ./calc  /dev/null
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+Stack now 0
+stderr:
+stderr:
+./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+memory exhausted
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1368:  $PREPARSER ./calc  input
+stderr:
+stderr:
+stderr:
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+Stack now 0
+memory exhausted
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1364: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 4 12 21
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Stack now 0 4 12 21 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Stack now 0 4 12 21 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 99):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Stack now 0 4 12 27
+Reducing stack by rule 13 (line 113):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Stack now 0 8 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 23 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Stack now 0 8 23 4 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 23 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Stack now 0 8 23 4 12 20
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Stack now 0 8 23 4 12 20 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Stack now 0 8 23 4 12 20 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 100):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 23 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Stack now 0 8 23 4 12 27
+Reducing stack by rule 13 (line 113):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Stack now 0 8 23 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 102):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Stack now 0 8 25
+Reducing stack by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Stack now 0 6 17
+Stack now 0 6 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1367: cat stderr
+stderr:
+./calc.at:1364: cat stderr
+./calc.at:1369: cat stderr
+stderr:
+stdout:
+./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+input:
+./calc.at:1370: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1369:  $PREPARSER ./calc  input
+input:
+stderr:
+./calc.at:1363: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (#) + (#) = 2222
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+./calc.at:1368: cat stderr
+./calc.at:1367:  $PREPARSER ./calc  input
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1363: cat stderr
+input:
+  | 1 + 2 * 3 + !* ++
+stderr:
+input:
+./calc.at:1368:  $PREPARSER ./calc  input
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+491. calc.at:1363:  ok
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
+./calc.at:1364:  $PREPARSER ./calc  input
+1.14: memory exhausted
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Stack now 0 4 11 26
+Reducing stack by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Stack now 0 8 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Stack now 0 8 21 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
+Entering state 1
+Stack now 0 8 21 4 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Stack now 0 8 21 4 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Stack now 0 8 21 4 12 21
+Reading a token
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Stack now 0 8 21 4 12 21 1
+Reducing stack by rule 5 (line 88):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Stack now 0 8 21 4 12 21 30
 Reading a token
 Next token is token '+' (1.13: )
 Reducing stack by rule 7 (line 99):
@@ -81756,14 +82165,31 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-error: 4444 != 1
+input:
+1.14: memory exhausted
+./calc.at:1369: cat stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1370:  $PREPARSER ./calc  input
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1367: cat stderr
+stderr:
+./calc.at:1368: cat stderr
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -82081,7 +82507,20 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1367: cat stderr
+  | (!!) + (1 2) = 1
+./calc.at:1369:  $PREPARSER ./calc  input
+stderr:
+input:
+input:
+  | (1 + #) = 1111
+./calc.at:1367:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1368:  $PREPARSER ./calc  input
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -82092,21 +82531,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1364: cat stderr
-  | (!!) + (1 2) = 1
-./calc.at:1367:  $PREPARSER ./calc  input
 stderr:
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-error: 2222 != 1
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
 input:
+./calc.at:1369: cat stderr
+input:
+  | 1 2
+./calc.at:1370:  $PREPARSER ./calc  input
+./calc.at:1368: cat stderr
 stderr:
   | (!!) + (1 2) = 1
 ./calc.at:1364:  $PREPARSER ./calc  input
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-error: 2222 != 1
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 stderr:
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1367: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -82247,8 +82700,39 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+input:
+stderr:
+input:
+input:
+  | (1 + #) = 1111
+  | (- *) + (1 2) = 1
+./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1368:  $PREPARSER ./calc  input
+  | (# + 1) = 1111
+./calc.at:1367:  $PREPARSER ./calc  input
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+stderr:
+498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union  ...
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1374: mv calc.y.tmp calc.y
+
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+./calc.at:1370: cat stderr
+1.6: syntax error: invalid character: '#'
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -82389,10 +82873,17 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1367: cat stderr
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+syntax error: invalid character: '#'
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1367:  $PREPARSER ./calc  input
+  | 1//2
+./calc.at:1370:  $PREPARSER ./calc  input
+./calc.at:1369: cat stderr
+./calc.at:1368: cat stderr
+stderr:
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -82403,20 +82894,32 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1367: cat stderr
+input:
 ./calc.at:1364: cat stderr
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-error: 2222 != 1
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1369:  $PREPARSER ./calc  input
+input:
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (# + 1) = 1111
+./calc.at:1368:  $PREPARSER ./calc  input
 stderr:
 input:
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-error: 2222 != 1
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (- *) + (1 2) = 1
+input:
+stderr:
 ./calc.at:1364:  $PREPARSER ./calc  input
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+  | (1 + # + 1) = 1111
+./calc.at:1367:  $PREPARSER ./calc  input
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 Starting parse
 Entering state 0
 Stack now 0
@@ -82565,7 +83068,17 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+stderr:
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1369: cat stderr
+syntax error: invalid character: '#'
+./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -82715,7 +83228,14 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1367: cat stderr
+stderr:
+input:
+./calc.at:1370: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1368: cat stderr
+syntax error: invalid character: '#'
+stderr:
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -82726,23 +83246,46 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1367: cat stderr
 input:
-./calc.at:1364: cat stderr
-  | (* *) + (*) + (*)
+input:
+stderr:
+input:
+  | (1 + 1) / (1 - 1)
 ./calc.at:1367:  $PREPARSER ./calc  input
+./calc.at:1364: cat stderr
+./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+  | error
+./calc.at:1370:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
+./calc.at:1368:  $PREPARSER ./calc  input
 stderr:
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
+error: null divisor
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+stderr:
+1.6: syntax error: invalid character: '#'
+input:
 input:
-  | (* *) + (*) + (*)
 stderr:
+error: null divisor
+  | 1 + 2 * 3 + !- ++
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1369:  $PREPARSER ./calc  input
+  | (* *) + (*) + (*)
 ./calc.at:1364:  $PREPARSER ./calc  input
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+stderr:
+./calc.at:1368: cat stderr
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 Starting parse
 Entering state 0
 Stack now 0
@@ -82894,8 +83437,14 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+input:
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1368:  $PREPARSER ./calc  input
+stderr:
 stderr:
+stderr:
+./calc.at:1367: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -83047,7 +83596,11 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1367: cat stderr
+./calc.at:1370: cat stderr
+1.11-17: error: null divisor
+./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1369: cat stderr
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -83058,18 +83611,31 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1364: cat stderr
-./calc.at:1367:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
 stderr:
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1370:  $PREPARSER ./calc  input
+493. calc.at:1367:  ok
+1.11-17: error: null divisor
+stderr:
+./calc.at:1364: cat stderr
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
 input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1368: cat stderr
+1.14: memory exhausted
 stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
   | 1 + 2 * 3 + !+ ++
-./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1364:  $PREPARSER ./calc  input
+
 stderr:
+494. calc.at:1368:  ok
+./calc.at:1370: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -83150,13 +83716,15 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1367:  $PREPARSER ./calc  input
-./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.14: memory exhausted
 stderr:
+input:
+./calc.at:1369: cat stderr
+  | 
+  | +1
+./calc.at:1370:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -83239,10 +83807,24 @@
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+input:
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (#) + (#) = 2222
+./calc.at:1369:  $PREPARSER ./calc  input
+stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
 input:
+./calc.at:1370: cat stderr
   | 1 + 2 * 3 + !- ++
 ./calc.at:1364:  $PREPARSER ./calc  input
 stderr:
+
+./calc.at:1370:  $PREPARSER ./calc  /dev/null
+stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -83323,8 +83905,18 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed  ...
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1375: mv calc.y.tmp calc.y
+
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1367: cat stderr
+stderr:
+./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 stderr:
 Starting parse
 Entering state 0
@@ -83406,9 +83998,6 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1367:  $PREPARSER ./calc  input
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -83419,16 +84008,48 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1369: cat stderr
+input:
+./calc.at:1364: cat stderr
+  | (1 + #) = 1111
+./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1370: cat stderr
 stderr:
-memory exhausted
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1364: cat stderr
-memory exhausted
+stdout:
+stderr:
+./types.at:139:  $PREPARSER ./test
+1.6: syntax error: invalid character: '#'
+stdout:
+stderr:
+500. calc.at:1387: testing Calculator %glr-parser   ...
+./calc.at:1387: mv calc.y.tmp calc.y
+
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
 input:
+input:
+stderr:
+./calc.at:1371: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1370:  $PREPARSER ./calc  input
   | 1 + 2 * 3 + !* ++
 ./calc.at:1364:  $PREPARSER ./calc  input
-./calc.at:1367: cat stderr
+./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+1.6: syntax error: invalid character: '#'
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -83511,7 +84132,35 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+./calc.at:1369: cat stderr
+452. types.at:139:  ok
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
+  | (# + 1) = 1111
+./calc.at:1369:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1371:  $PREPARSER ./calc  input
+stderr:
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -83594,9 +84243,14 @@
 Stack now 0 8 21
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | (#) + (#) = 2222
-./calc.at:1367:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error: invalid character: '#'
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -83607,18 +84261,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1375: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1364: cat stderr
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
+./calc.at:1370: cat stderr
 input:
+stderr:
   | (#) + (#) = 2222
 ./calc.at:1364:  $PREPARSER ./calc  input
+input:
+1.2: syntax error: invalid character: '#'
+  | (!!) + (1 2) = 1
+./calc.at:1370:  $PREPARSER ./calc  input
+input:
 stderr:
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1371:  $PREPARSER ./calc  input
+./calc.at:1369: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -83740,10 +84406,19 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1367: cat stderr
+./calc.at:1387: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 stderr:
 input:
+stderr:
+./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+  | (1 + # + 1) = 1111
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1369:  $PREPARSER ./calc  input
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -83865,8 +84540,14 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-  | (1 + #) = 1111
-./calc.at:1367:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1370: cat stderr
+501. calc.at:1389: testing Calculator %glr-parser %header  ...
+./calc.at:1389: mv calc.y.tmp calc.y
+
 stderr:
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -83878,16 +84559,31 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error: invalid character: '#'
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+1.6: syntax error: invalid character: '#'
 ./calc.at:1364: cat stderr
-syntax error: invalid character: '#'
 input:
+  | (- *) + (1 2) = 1
+./calc.at:1370:  $PREPARSER ./calc  input
+./calc.at:1371: cat stderr
+./calc.at:1369: cat stderr
+stderr:
+./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1369:  $PREPARSER ./calc  input
   | (1 + #) = 1111
 ./calc.at:1364:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1367: cat stderr
+stderr:
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -83986,14 +84682,15 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
 input:
-  | (# + 1) = 1111
-./calc.at:1367:  $PREPARSER ./calc  input
+./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
+./calc.at:1371:  $PREPARSER ./calc  input
+./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 Starting parse
 Entering state 0
 Stack now 0
@@ -84093,7 +84790,9 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-syntax error: invalid character: '#'
+./calc.at:1370: cat stderr
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -84104,20 +84803,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1367: cat stderr
+stderr:
 ./calc.at:1364: cat stderr
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 input:
+./calc.at:1369: cat stderr
+  | (* *) + (*) + (*)
+./calc.at:1370:  $PREPARSER ./calc  input
 input:
-  | (1 + # + 1) = 1111
-./calc.at:1367:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1389: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
   | (# + 1) = 1111
 ./calc.at:1364:  $PREPARSER ./calc  input
-syntax error: invalid character: '#'
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1371: cat stderr
 stderr:
 stderr:
-syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -84213,8 +84912,25 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+495. calc.at:1369:  ok
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | error
+./calc.at:1371:  $PREPARSER ./calc  input
+stderr:
+stderr:
 stderr:
+stdout:
+stderr:
+./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 Starting parse
 Entering state 0
 Stack now 0
@@ -84310,7 +85026,22 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1367: cat stderr
+./calc.at:1374: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+input:
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -84321,19 +85052,48 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1370: cat stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1374:  $PREPARSER ./calc  input
+./calc.at:1371: cat stderr
+stderr:
 input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1367:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1370:  $PREPARSER ./calc  input
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-error: null divisor
-./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1364: cat stderr
-stderr:
 input:
+input:
+502. calc.at:1390: testing Calculator %glr-parser %locations  ...
+  | 1 = 2 = 3
+./calc.at:1371:  $PREPARSER ./calc  input
+stderr:
   | (1 + # + 1) = 1111
-error: null divisor
 ./calc.at:1364:  $PREPARSER ./calc  input
+./calc.at:1390: mv calc.y.tmp calc.y
+
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -84448,9 +85208,18 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+input:
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1374:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1367: cat stderr
+syntax error
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1371: cat stderr
+stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -84565,7 +85334,20 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-493. calc.at:1367:  ok
+  | 1 + 2 * 3 + !- ++
+./calc.at:1370:  $PREPARSER ./calc  input
+stderr:
+stderr:
+syntax error
+input:
+  | 
+  | +1
+./calc.at:1371:  $PREPARSER ./calc  input
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+stderr:
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -84576,12 +85358,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1364: cat stderr
 input:
-
+./calc.at:1370: cat stderr
+./calc.at:1371: cat stderr
   | (1 + 1) / (1 - 1)
 ./calc.at:1364:  $PREPARSER ./calc  input
 stderr:
+input:
+./calc.at:1374: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1371:  $PREPARSER ./calc  /dev/null
+  | 1 + 2 * 3 + !* ++
+./calc.at:1370:  $PREPARSER ./calc  input
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -84723,8 +85523,17 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+stderr:
+./calc.at:1390: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 ./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.14: memory exhausted
 stderr:
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+stderr:
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -84866,6 +85675,9 @@
 Stack now 0 6 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+./calc.at:1371: cat stderr
+1.14: memory exhausted
 ./calc.at:1364: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -84876,185 +85688,83 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1364: cat stderr
-497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full  ...
-./calc.at:1371: mv calc.y.tmp calc.y
-
-./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-492. calc.at:1364:  ok
-
-stderr:
-stdout:
-./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1368: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-input:
-./calc.at:1371: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1368:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 2
-./calc.at:1368:  $PREPARSER ./calc  input
-498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union  ...
-./calc.at:1374: mv calc.y.tmp calc.y
-
-stderr:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-stderr:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1368: cat stderr
-input:
-  | 1//2
-./calc.at:1368:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1368: cat stderr
-stderr:
-input:
-stdout:
-  | error
-./calc.at:1368:  $PREPARSER ./calc  input
-./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-stderr:
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1369: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-input:
-./calc.at:1368: cat stderr
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1369:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1374: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-input:
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-  | 1 = 2 = 3
-./calc.at:1368:  $PREPARSER ./calc  input
-./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | 1 2
-./calc.at:1369:  $PREPARSER ./calc  input
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-stderr:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1368: cat stderr
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 input:
-  | 
-  | +1
-./calc.at:1368:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1369: cat stderr
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1364: cat stderr
 input:
-./calc.at:1368: cat stderr
   | 1//2
-./calc.at:1369:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1368:  $PREPARSER ./calc  /dev/null
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-stderr:
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1369: cat stderr
-./calc.at:1368: cat stderr
-input:
-input:
-  | error
+./calc.at:1374:  $PREPARSER ./calc  input
+./calc.at:1370: cat stderr
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1368:  $PREPARSER ./calc  input
-./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1371:  $PREPARSER ./calc  input
 stderr:
+syntax error
 stderr:
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.1-46: error: 4444 != 1
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+syntax error
+492. calc.at:1364:  ok
+  | (#) + (#) = 2222
+./calc.at:1370:  $PREPARSER ./calc  input
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.1-46: error: 4444 != 1
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1371: cat stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1371:  $PREPARSER ./calc  input
+./calc.at:1374: cat stderr
+./calc.at:1370: cat stderr
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | error
+./calc.at:1374:  $PREPARSER ./calc  input
+input:
 stderr:
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 stdout:
-./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1368: cat stderr
-./calc.at:1370: "$PERL" -ne '
+  | (1 + #) = 1111
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+./calc.at:1370:  $PREPARSER ./calc  input
+stderr:
+syntax error
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
+stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1375: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -85065,9 +85775,29 @@
         || /\t/
         )' calc.c
 
-./calc.at:1369: cat stderr
+syntax error
+./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1371: cat stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span}  ...
+./calc.at:1374: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
+./calc.at:1391: mv calc.y.tmp calc.y
+
+./calc.at:1370: cat stderr
 input:
+  | (- *) + (1 2) = 1
+./calc.at:1371:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -85081,422 +85811,197 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1370:  $PREPARSER ./calc  input
-stderr:
-  | (!!) + (1 2) = 1
-./calc.at:1368:  $PREPARSER ./calc  input
+./calc.at:1375:  $PREPARSER ./calc  input
 input:
+  | (# + 1) = 1111
+./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1370:  $PREPARSER ./calc  input
 stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 = 2 = 3
-./calc.at:1369:  $PREPARSER ./calc  input
-stderr:
-stderr:
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
 stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-input:
-  | 1 2
-./calc.at:1370:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1368: cat stderr
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1369: cat stderr
 ./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
+./calc.at:1374: cat stderr
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 input:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-  | (- *) + (1 2) = 1
-./calc.at:1368:  $PREPARSER ./calc  input
 stderr:
-  | 
-  | +1
-./calc.at:1369:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
+./calc.at:1374:  $PREPARSER ./calc  input
+input:
 1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
 1.1-17: error: 2222 != 1
+1.2: syntax error: invalid character: '#'
 stderr:
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+  | 1 2
+./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1391: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+1.3: syntax error, unexpected number
+./calc.at:1371: cat stderr
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1370: cat stderr
+syntax error
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1368: cat stderr
-./calc.at:1369: cat stderr
-input:
-  | 1//2
-./calc.at:1369:  $PREPARSER ./calc  /dev/null
-./calc.at:1370:  $PREPARSER ./calc  input
 stderr:
 input:
+input:
+syntax error
+1.3: syntax error, unexpected number
+  | (1 + # + 1) = 1111
+./calc.at:1370:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+1.6: syntax error: invalid character: '#'
   | (* *) + (*) + (*)
-./calc.at:1368:  $PREPARSER ./calc  input
+./calc.at:1371:  $PREPARSER ./calc  input
+./calc.at:1375: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
 1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1375: cat stderr
 stderr:
 1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1369: cat stderr
 ./calc.at:1370: cat stderr
-./calc.at:1368: cat stderr
-input:
-  | error
-input:
-./calc.at:1370:  $PREPARSER ./calc  input
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1369:  $PREPARSER ./calc  input
-stderr:
-input:
-stderr:
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1368:  $PREPARSER ./calc  input
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
+./calc.at:1374: cat stderr
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1370: cat stderr
-./calc.at:1368:  $PREPARSER ./calc  input
-./calc.at:1369: cat stderr
+  | 1//2
+./calc.at:1375:  $PREPARSER ./calc  input
+./calc.at:1371: cat stderr
 stderr:
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-stderr:
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1369:  $PREPARSER ./calc  input
-  | 1 = 2 = 3
+  | (1 + 1) / (1 - 1)
 ./calc.at:1370:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1368: cat stderr
-stderr:
-stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1368:  $PREPARSER ./calc  input
-./calc.at:1369: cat stderr
-stderr:
-1.14: memory exhausted
-./calc.at:1370: cat stderr
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1369:  $PREPARSER ./calc  input
-stderr:
-input:
-1.14: memory exhausted
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 
   | +1
-./calc.at:1370:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1368: cat stderr
-stderr:
-stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-input:
-  | (#) + (#) = 2222
-./calc.at:1368:  $PREPARSER ./calc  input
-./calc.at:1370: cat stderr
-./calc.at:1369: cat stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1370:  $PREPARSER ./calc  /dev/null
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-input:
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (* *) + (*) + (*)
-./calc.at:1369:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1368: cat stderr
+./calc.at:1374:  $PREPARSER ./calc  input
 stderr:
-input:
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-  | (1 + #) = 1111
-./calc.at:1368:  $PREPARSER ./calc  input
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-./calc.at:1370: cat stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+1.11-17: error: null divisor
 input:
-./calc.at:1369: cat stderr
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1370:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
-1.6: syntax error: invalid character: '#'
 ./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
-input:
-./calc.at:1368: cat stderr
   | 1 + 2 * 3 + !+ ++
-./calc.at:1369:  $PREPARSER ./calc  input
-./calc.at:1370: cat stderr
-stderr:
-input:
-  | (# + 1) = 1111
-./calc.at:1368:  $PREPARSER ./calc  input
-input:
-stderr:
-  | (!!) + (1 2) = 1
-./calc.at:1370:  $PREPARSER ./calc  input
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-1.2: syntax error: invalid character: '#'
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1369:  $PREPARSER ./calc  input
-./calc.at:1368: cat stderr
-stderr:
-stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-input:
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + # + 1) = 1111
-stderr:
-./calc.at:1368:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1370: cat stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1369: cat stderr
-input:
-./calc.at:1368: cat stderr
-  | (- *) + (1 2) = 1
-input:
-./calc.at:1370:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !* ++
-stderr:
-./calc.at:1369:  $PREPARSER ./calc  input
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-input:
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + 1) / (1 - 1)
-stderr:
-./calc.at:1368:  $PREPARSER ./calc  input
+./calc.at:1371:  $PREPARSER ./calc  input
 stderr:
 stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-1.14: memory exhausted
-1.11-17: error: null divisor
-./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1370: cat stderr
 stderr:
+./calc.at:1375: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 1.11-17: error: null divisor
+syntax error
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.14: memory exhausted
-./calc.at:1368: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1370:  $PREPARSER ./calc  input
-494. calc.at:1368:  ok
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1369: cat stderr
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-input:
-  | (#) + (#) = 2222
-./calc.at:1369:  $PREPARSER ./calc  input
-stderr:
-
+./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1375: cat stderr
 ./calc.at:1370: cat stderr
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1370:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1369: cat stderr
-stderr:
-stderr:
-stdout:
-499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed  ...
-./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-input:
-./calc.at:1375: mv calc.y.tmp calc.y
-
-./calc.at:1374: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-  | (1 + #) = 1111
-./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1374: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
-stderr:
-./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1374:  $PREPARSER ./calc  input
 input:
-stderr:
-1.6: syntax error: invalid character: '#'
   | 1 + 2 * 3 + !- ++
-./calc.at:1370:  $PREPARSER ./calc  input
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1371:  $PREPARSER ./calc  input
+  | error
+496. calc.at:1370:  ok
+./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.1: syntax error, unexpected invalid token
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: cat stderr
 stderr:
-1.6: syntax error: invalid character: '#'
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error, unexpected invalid token
+./calc.at:1374:  $PREPARSER ./calc  /dev/null
 stderr:
-input:
-  | 1 2
-./calc.at:1369: cat stderr
-./calc.at:1374:  $PREPARSER ./calc  input
-./calc.at:1370: cat stderr
 stderr:
 syntax error
 ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1375: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+
+./calc.at:1371: cat stderr
+syntax error
 input:
-input:
-  | (# + 1) = 1111
-./calc.at:1369:  $PREPARSER ./calc  input
+./calc.at:1375: cat stderr
   | 1 + 2 * 3 + !* ++
-./calc.at:1370:  $PREPARSER ./calc  input
-syntax error
-stderr:
+./calc.at:1371:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
+input:
+504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc"  ...
 1.14: memory exhausted
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1375:  $PREPARSER ./calc  input
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1392: mv calc.y.tmp calc.y
+
 stderr:
 1.14: memory exhausted
+stderr:
 ./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -85507,45 +86012,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.7: syntax error, unexpected '='
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1371: cat stderr
 stderr:
-./calc.at:1375: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-1.2: syntax error: invalid character: '#'
-./calc.at:1370: cat stderr
+1.7: syntax error, unexpected '='
 ./calc.at:1374: cat stderr
-./calc.at:1369: cat stderr
-input:
 input:
-  | 1//2
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1369:  $PREPARSER ./calc  input
-./calc.at:1374:  $PREPARSER ./calc  input
   | (#) + (#) = 2222
-./calc.at:1370:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-syntax error
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
+./calc.at:1371:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-syntax error
 1.2: syntax error: invalid character: '#'
 1.8: syntax error: invalid character: '#'
-./calc.at:1369: cat stderr
 input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1369:  $PREPARSER ./calc  input
-./calc.at:1370: cat stderr
-stderr:
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -85555,37 +86036,38 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.11-17: error: null divisor
-./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-1.11-17: error: null divisor
-  | (1 + #) = 1111
-./calc.at:1370:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1369: cat stderr
-./calc.at:1374: cat stderr
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-495. calc.at:1369:  ok
-1.6: syntax error: invalid character: '#'
-input:
-  | error
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1374:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1375: cat stderr
 syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
 ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-./calc.at:1370: cat stderr
 stderr:
-syntax error
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+stderr:
+./calc.at:1392: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 input:
-  | (# + 1) = 1111
-./calc.at:1370:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1375:  $PREPARSER ./calc  input
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
 stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error, unexpected '+'
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1371: cat stderr
+2.1: syntax error, unexpected '+'
+input:
 ./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -85596,32 +86078,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1370: cat stderr
-./calc.at:1374: cat stderr
-500. calc.at:1387: testing Calculator %glr-parser   ...
-./calc.at:1387: mv calc.y.tmp calc.y
-
-./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-input:
-  | 1 = 2 = 3
-./calc.at:1374:  $PREPARSER ./calc  input
-stderr:
-syntax error
-input:
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-  | (1 + # + 1) = 1111
-./calc.at:1370:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-stderr:
-stdout:
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -85631,7 +86088,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1371: "$PERL" -ne '
+  | (1 + #) = 1111
+./calc.at:1371:  $PREPARSER ./calc  input
+stderr:
+stderr:
+./calc.at:1375: cat stderr
+1.6: syntax error: invalid character: '#'
+stdout:
+./calc.at:1387: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -85642,9 +86106,15 @@
         || /\t/
         )' calc.c
 
-stderr:
 ./calc.at:1374: cat stderr
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1375:  $PREPARSER ./calc  /dev/null
+stderr:
+stderr:
 1.6: syntax error: invalid character: '#'
+1.1: syntax error, unexpected end of file
+input:
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -85659,38 +86129,21 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1371:  $PREPARSER ./calc  input
-input:
-./calc.at:1370: cat stderr
-stderr:
-  | 
-  | +1
+./calc.at:1387:  $PREPARSER ./calc  input
+  | (!!) + (1 2) = 1
 ./calc.at:1374:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
 stderr:
+1.1: syntax error, unexpected end of file
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1370:  $PREPARSER ./calc  input
-  | 1 2
-./calc.at:1371:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-1.11-17: error: null divisor
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: 2222 != 1
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1371: cat stderr
 stderr:
 stderr:
-1.11-17: error: null divisor
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -85700,27 +86153,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1370: cat stderr
-./calc.at:1387: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-./calc.at:1371: cat stderr
-496. calc.at:1370:  ok
-./calc.at:1374: cat stderr
 input:
-  | 1//2
-./calc.at:1374:  $PREPARSER ./calc  /dev/null
-./calc.at:1371:  $PREPARSER ./calc  input
-stderr:
-
+input:
+./calc.at:1375: cat stderr
 syntax error
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: 2222 != 1
+  | (# + 1) = 1111
+./calc.at:1371:  $PREPARSER ./calc  input
+  | 1 2
+./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
-stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1371: cat stderr
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1375:  $PREPARSER ./calc  input
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -85731,42 +86181,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-./calc.at:1374: cat stderr
-  | error
-./calc.at:1371:  $PREPARSER ./calc  input
-stderr:
-input:
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1374:  $PREPARSER ./calc  input
-501. calc.at:1389: testing Calculator %glr-parser %header  ...
 stderr:
 stderr:
-./calc.at:1389: mv calc.y.tmp calc.y
-
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-syntax error
-syntax error
-syntax error
 syntax error
-error: 4444 != 1
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1371: cat stderr
-./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-input:
-  | 1 = 2 = 3
-./calc.at:1371:  $PREPARSER ./calc  input
+./calc.at:1374: cat stderr
+1.2: syntax error: invalid character: '#'
 stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+stderr:
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -85776,33 +86210,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1371: cat stderr
-./calc.at:1374: cat stderr
-input:
-  | 
-  | +1
+stdout:
+./calc.at:1389: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1371:  $PREPARSER ./calc  input
-./calc.at:1374:  $PREPARSER ./calc  input
-stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1389: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-stderr:
-stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-syntax error
-error: 2222 != 1
 ./calc.at:1371: cat stderr
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+  | (- *) + (1 2) = 1
+./calc.at:1374:  $PREPARSER ./calc  input
+input:
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -85812,37 +86237,63 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1371:  $PREPARSER ./calc  /dev/null
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1389:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1374: cat stderr
+./calc.at:1375: cat stderr
+./calc.at:1387: cat stderr
 stderr:
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1374:  $PREPARSER ./calc  input
-./calc.at:1371: cat stderr
-stderr:
+  | (1 + # + 1) = 1111
+./calc.at:1371:  $PREPARSER ./calc  input
 syntax error
 syntax error
 error: 2222 != 1
 ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1371:  $PREPARSER ./calc  input
+input:
+1.6: syntax error: invalid character: '#'
 stderr:
+  | 1//2
+./calc.at:1387:  $PREPARSER ./calc  input
 syntax error
 syntax error
 error: 2222 != 1
+input:
 stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
+syntax error
+  | (!!) + (1 2) = 1
+./calc.at:1375:  $PREPARSER ./calc  input
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1389:  $PREPARSER ./calc  input
 ./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+stderr:
+stderr:
+syntax error
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+syntax error
 ./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -85853,37 +86304,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
-./calc.at:1371: cat stderr
-./calc.at:1374: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1371:  $PREPARSER ./calc  input
-input:
-stderr:
-  | (* *) + (*) + (*)
-./calc.at:1374:  $PREPARSER ./calc  input
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
-syntax error
-syntax error
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-syntax error
-syntax error
-syntax error
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.11: syntax error, unexpected number
 1.1-16: error: 2222 != 1
 ./calc.at:1371: cat stderr
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -85893,53 +86320,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1371:  $PREPARSER ./calc  input
-stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1374: cat stderr
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-input:
-./calc.at:1371: cat stderr
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1374:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | (* *) + (*) + (*)
-./calc.at:1371:  $PREPARSER ./calc  input
-./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-input:
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1374:  $PREPARSER ./calc  input
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-stderr:
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1371: cat stderr
-stderr:
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1371:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -85949,29 +86330,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1371:  $PREPARSER ./calc  input
-./calc.at:1374: cat stderr
-stderr:
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 + !* ++
-stderr:
-./calc.at:1374:  $PREPARSER ./calc  input
-stderr:
-memory exhausted
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1371: cat stderr
-stderr:
-memory exhausted
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1371:  $PREPARSER ./calc  input
-stderr:
-1.14: memory exhausted
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -85981,29 +86340,53 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.14: memory exhausted
+input:
 ./calc.at:1374: cat stderr
+./calc.at:1375: cat stderr
+./calc.at:1387: cat stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1371:  $PREPARSER ./calc  input
 input:
-  | (#) + (#) = 2222
+./calc.at:1389: cat stderr
+stderr:
+1.11-17: error: null divisor
+  | (* *) + (*) + (*)
 ./calc.at:1374:  $PREPARSER ./calc  input
+input:
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1371: cat stderr
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
 input:
-  | (#) + (#) = 2222
-./calc.at:1371:  $PREPARSER ./calc  input
+syntax error
+syntax error
+syntax error
+1.11-17: error: null divisor
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+  | 1//2
+./calc.at:1389:  $PREPARSER ./calc  input
+stderr:
+syntax error
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86013,27 +86396,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1374: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1374:  $PREPARSER ./calc  input
 ./calc.at:1371: cat stderr
 stderr:
-syntax error: invalid character: '#'
+syntax error
+497. calc.at:1371:  ok
+syntax error
+syntax error
+syntax error
+syntax error
+./calc.at:1375: cat stderr
 stderr:
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
-stderr:
-./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE'
-input:
-syntax error: invalid character: '#'
-  | (1 + #) = 1111
-./calc.at:1371:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1375: "$PERL" -ne '
+./calc.at:1390: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -86044,9 +86418,9 @@
         || /\t/
         )' calc.c
 
-1.6: syntax error: invalid character: '#'
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+input:
+  | (* *) + (*) + (*)
+./calc.at:1375:  $PREPARSER ./calc  input
 ./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -86057,8 +86431,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1389: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
-1.6: syntax error: invalid character: '#'
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -86072,39 +86455,9 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1374: cat stderr
-./calc.at:1371: cat stderr
-stderr:
-./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-input:
-  | (# + 1) = 1111
-./calc.at:1374:  $PREPARSER ./calc  input
-  | (# + 1) = 1111
-./calc.at:1371:  $PREPARSER ./calc  input
-stderr:
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
-input:
-  | 1 2
-stderr:
-./calc.at:1375:  $PREPARSER ./calc  input
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-1.3: syntax error, unexpected number
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
+./calc.at:1390:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error, unexpected number
-./calc.at:1371: cat stderr
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86114,6 +86467,29 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1389: cat stderr
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: cat stderr
+stderr:
+stderr:
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+input:
+./calc.at:1387: cat stderr
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1374:  $PREPARSER ./calc  input
+input:
+  | 1 2
+./calc.at:1390:  $PREPARSER ./calc  input
+  | error
 ./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -86124,35 +86500,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1371:  $PREPARSER ./calc  input
-./calc.at:1375: cat stderr
+./calc.at:1389:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1374: cat stderr
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
 stderr:
-  | 1//2
-./calc.at:1375:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
+  | 1 = 2 = 3
+./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-input:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1371: cat stderr
-  | (1 + # + 1) = 1111
-./calc.at:1374:  $PREPARSER ./calc  input
 stderr:
+1.3: syntax error
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+1.3: syntax error
+./calc.at:1375: cat stderr
 ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-input:
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+syntax error
+stderr:
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86162,15 +86531,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (1 + 1) / (1 - 1)
-./calc.at:1371:  $PREPARSER ./calc  input
-./calc.at:1375: cat stderr
+./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
 stderr:
-1.11-17: error: null divisor
-./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-1.11-17: error: null divisor
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc}  ...
+./calc.at:1393: mv calc.y.tmp calc.y
+
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+./calc.at:1390: cat stderr
+./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
+./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86180,19 +86557,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | 1 + 2 * 3 + !- ++
+./calc.at:1374:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | error
+  | 1//2
+./calc.at:1390:  $PREPARSER ./calc  input
+input:
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1371: cat stderr
-1.1: syntax error, unexpected invalid token
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1374: cat stderr
 stderr:
-1.1: syntax error, unexpected invalid token
-497. calc.at:1371:  ok
-input:
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86202,24 +86579,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (1 + 1) / (1 - 1)
-./calc.at:1374:  $PREPARSER ./calc  input
-stderr:
-error: null divisor
-./calc.at:1375: cat stderr
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-error: null divisor
-
-  | 1 = 2 = 3
-./calc.at:1375:  $PREPARSER ./calc  input
+./calc.at:1389: cat stderr
 stderr:
-1.7: syntax error, unexpected '='
+1.3: syntax error
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.7: syntax error, unexpected '='
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86229,7 +86596,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+input:
+1.3: syntax error
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86239,27 +86608,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1387: cat stderr
+  | 1 = 2 = 3
+./calc.at:1389:  $PREPARSER ./calc  input
 ./calc.at:1375: cat stderr
 input:
-./calc.at:1374: cat stderr
+stderr:
+syntax error
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 
   | +1
-./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-input:
-2.1: syntax error, unexpected '+'
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-502. calc.at:1390: testing Calculator %glr-parser %locations  ...
-./calc.at:1390: mv calc.y.tmp calc.y
-
-  | 123
-./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1374:  $PREPARSER ./calc --num input
-stderr:
+./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-2.1: syntax error, unexpected '+'
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86269,23 +86630,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
 stderr:
-./calc.at:1375: cat stderr
-./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1375:  $PREPARSER ./calc  /dev/null
+  | 1 + 2 * 3 + !* ++
+./calc.at:1374: cat stderr
+syntax error
+./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error, unexpected end of file
+1.14: memory exhausted
 ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | 1 + 2 * 3
-./calc.at:1374:  $PREPARSER ./calc --num input
+  | 1 + 2 * 3 + !* ++
+./calc.at:1374:  $PREPARSER ./calc  input
+./calc.at:1393: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 stderr:
 stderr:
 syntax error
-1.1: syntax error, unexpected end of file
+memory exhausted
 ./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86295,10 +86661,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-./calc.at:1375: cat stderr
-input:
-./calc.at:1374: "$PERL" -pi -e 'use strict;
+1.14: memory exhausted
+stderr:
+memory exhausted
+./calc.at:1390: cat stderr
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86308,30 +86675,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1390: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-./calc.at:1374: cat stderr
-stdout:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./types.at:139:  $PREPARSER ./test
-stderr:
-input:
-  | 1 + 2 * 3
-./calc.at:1374:  $PREPARSER ./calc --exp input
+./calc.at:1389: cat stderr
 ./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -86342,26 +86686,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-450. types.at:139: ./calc.at:1375: cat stderr
- ok
-./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1375:  $PREPARSER ./calc  input
-498. calc.at:1374: stderr:
- ok
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86371,21 +86697,44 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-
+  | error
+./calc.at:1390:  $PREPARSER ./calc  input
+input:
+stderr:
 ./calc.at:1375: cat stderr
+1.1: syntax error
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 
+  | +1
+./calc.at:1389:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1374: cat stderr
+./calc.at:1387: cat stderr
+syntax error
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
-  | (- *) + (1 2) = 1
+./calc.at:1387:  $PREPARSER ./calc  /dev/null
+input:
+1.1: syntax error
+  | (#) + (#) = 2222
+./calc.at:1374:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
 ./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
+stderr:
+stderr:
+syntax error
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86395,24 +86744,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
-input:
-503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span}  ...
-./calc.at:1391: mv calc.y.tmp calc.y
-
-  | (* *) + (*) + (*)
-./calc.at:1375:  $PREPARSER ./calc  input
-./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
 stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86422,25 +86760,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
-504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc"  ...
-./calc.at:1392: mv calc.y.tmp calc.y
-
-./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+./calc.at:1390: cat stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86450,17 +86773,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
-./calc.at:1392: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-./calc.at:1391: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-1.14: memory exhausted
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1390:  $PREPARSER ./calc  input
 stderr:
-1.14: memory exhausted
+1.7: syntax error
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -86471,18 +86789,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+./calc.at:1389: cat stderr
+1.7: syntax error
+./calc.at:1375: cat stderr
+./calc.at:1389:  $PREPARSER ./calc  /dev/null
+./calc.at:1387: cat stderr
+./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86492,16 +86805,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
 input:
   | (1 + #) = 1111
 ./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86511,16 +86818,31 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
+stderr:
+syntax error
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (# + 1) = 1111
-./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
+1.6: syntax error: invalid character: '#'
 ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+syntax error
+stderr:
+./calc.at:1390: cat stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1374: cat stderr
+./calc.at:1387:  $PREPARSER ./calc  input
+stderr:
+1.6: syntax error: invalid character: '#'
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
+./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86530,15 +86852,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1375:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + #) = 1111
+  | 
+  | +1
+./calc.at:1390:  $PREPARSER ./calc  input
+./calc.at:1374:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
 ./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -86549,16 +86868,40 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+stderr:
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
+2.1: syntax error
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1375: cat stderr
+./calc.at:1389: cat stderr
+syntax error: invalid character: '#'
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (1 + 1) / (1 - 1)
+stderr:
+stderr:
+syntax error: invalid character: '#'
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1389:  $PREPARSER ./calc  input
+input:
+  | (# + 1) = 1111
 ./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-1.11-17: error: null divisor
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error
 stderr:
-1.11-17: error: null divisor
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86568,23 +86911,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
-input:
-  | 123
-./calc.at:1375:  $PREPARSER ./calc --num input
-stderr:
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3
-./calc.at:1375:  $PREPARSER ./calc --num input
 stderr:
-1.3: syntax error, unexpected '+', expecting end of file
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error, unexpected '+', expecting end of file
-./calc.at:1375: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86594,61 +86924,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1375: cat stderr
-input:
-  | 1 + 2 * 3
-./calc.at:1375:  $PREPARSER ./calc --exp input
-stderr:
-./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-499. calc.at:1375:  ok
-
-505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc}  ...
-./calc.at:1393: mv calc.y.tmp calc.y
-
-./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1393: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-stderr:
-stdout:
-./calc.at:1387: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1387:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-  | 1 2
-./calc.at:1387:  $PREPARSER ./calc  input
-stderr:
 syntax error
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 syntax error
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+syntax error
+syntax error
+error: 4444 != 1
+./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86658,16 +86939,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1387: cat stderr
-input:
-  | 1//2
-./calc.at:1387:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86677,16 +86949,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1387: cat stderr
-input:
-  | error
-./calc.at:1387:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+./calc.at:1390: cat stderr
+./calc.at:1374: cat stderr
+stdout:
+./calc.at:1387: cat stderr
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86696,11 +86964,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1387: cat stderr
-stderr:
-stdout:
+./calc.at:1390:  $PREPARSER ./calc  /dev/null
 input:
-./calc.at:1389: "$PERL" -ne '
+./calc.at:1391: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -86709,16 +86975,30 @@
         || /\s$/
         # No tabs.
         || /\t/
-        )' calc.c calc.h
+        )' calc.c
 
-  | 1 = 2 = 3
-./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error
+  | (# + 1) = 1111
+./calc.at:1374:  $PREPARSER ./calc  input
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
 stderr:
-syntax error
+./calc.at:1389: cat stderr
+syntax error: invalid character: '#'
+input:
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (!!) + (1 2) = 1
+./calc.at:1387:  $PREPARSER ./calc  input
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1375: cat stderr
+stderr:
+stderr:
+1.1: syntax error
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1389:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -86732,12 +87012,29 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1389:  $PREPARSER ./calc  input
+./calc.at:1391:  $PREPARSER ./calc  input
+syntax error
+error: 2222 != 1
 stderr:
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1390: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error
+error: 2222 != 1
 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 input:
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86747,20 +87044,45 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 2
-./calc.at:1389:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
+./calc.at:1390: cat stderr
+./calc.at:1375:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1387: cat stderr
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
+error: 2222 != 1
+syntax error
+error: 2222 != 1
 input:
-  | 
-  | +1
-./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-syntax error
+input:
+1.6: syntax error: invalid character: '#'
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1390:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1391:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+stderr:
+1.3: syntax error
+stderr:
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: cat stderr
+1.6: syntax error: invalid character: '#'
+stderr:
+1.3: syntax error
 ./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -86771,10 +87093,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1389: cat stderr
 ./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -86785,22 +87103,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1//2
-./calc.at:1389:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1387: cat stderr
-stderr:
-./calc.at:1387:  $PREPARSER ./calc  /dev/null
-syntax error
-stderr:
-syntax error
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86810,8 +87113,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1389: cat stderr
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86822,26 +87124,44 @@
   }eg
 ' expout || exit 77
 input:
-  | error
-./calc.at:1389:  $PREPARSER ./calc  input
+./calc.at:1391: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (1 + # + 1) = 1111
+./calc.at:1374:  $PREPARSER ./calc  input
+./calc.at:1390: cat stderr
+./calc.at:1391: cat stderr
 stderr:
 ./calc.at:1387: cat stderr
-syntax error
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1375: cat stderr
+input:
+syntax error: invalid character: '#'
+./calc.at:1389: cat stderr
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
 stderr:
-syntax error
+./calc.at:1391:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (!!) + (1 2) = 1
+./calc.at:1390:  $PREPARSER ./calc  input
+input:
+1.3: syntax error
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+stderr:
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1375:  $PREPARSER ./calc  input
+./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86851,13 +87171,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1389: cat stderr
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+1.11: syntax error
+1.1-16: error: 2222 != 1
+stderr:
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+1.3: syntax error
+stderr:
+1.11-17: error: null divisor
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86867,28 +87192,43 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 = 2 = 3
+  | (- *) + (1 2) = 1
 ./calc.at:1389:  $PREPARSER ./calc  input
-./calc.at:1387: cat stderr
+  | (- *) + (1 2) = 1
+./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1390: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-syntax error
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
+./calc.at:1374: cat stderr
 stderr:
-input:
-./types.at:139:  $PREPARSER ./test
 syntax error
+syntax error
+error: 2222 != 1
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1391: cat stderr
+syntax error
+syntax error
+error: 2222 != 1
 stderr:
-  | (!!) + (1 2) = 1
-./calc.at:1387:  $PREPARSER ./calc  input
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
+syntax error
+syntax error
+error: 2222 != 1
 stderr:
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
 syntax error
 error: 2222 != 1
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+./calc.at:1375: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86898,11 +87238,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-451. types.at:139:  ok
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1374:  $PREPARSER ./calc  input
+./calc.at:1375: cat stderr
 stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1389: cat stderr
+error: null divisor
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 ./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -86913,27 +87256,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-
+stderr:
 input:
-  | 
-  | +1
-./calc.at:1389:  $PREPARSER ./calc  input
+  | 123
+./calc.at:1375:  $PREPARSER ./calc --num input
+  | error
+./calc.at:1391:  $PREPARSER ./calc  input
+error: null divisor
+stderr:
+1.1: syntax error
+./calc.at:1390: cat stderr
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1389: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1387: cat stderr
-syntax error
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
+./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.1: syntax error
   | (- *) + (1 2) = 1
-./calc.at:1387:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86943,15 +87296,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-syntax error
-error: 2222 != 1
-506. calc.at:1394: testing Calculator %glr-parser %verbose  ...
-./calc.at:1394: mv calc.y.tmp calc.y
-
-./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1389: cat stderr
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+./calc.at:1390:  $PREPARSER ./calc  input
+input:
+input:
+./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86961,22 +87309,38 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1389:  $PREPARSER ./calc  /dev/null
-stderr:
-syntax error
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1389: cat stderr
 stderr:
-./calc.at:1387: cat stderr
-syntax error
-input:
+  | 1 + 2 * 3
+./calc.at:1375:  $PREPARSER ./calc --num input
   | (* *) + (*) + (*)
 ./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
 syntax error
 syntax error
 ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+./calc.at:1391: cat stderr
+stderr:
+stderr:
+1.3: syntax error, unexpected '+', expecting end of file
+syntax error
+syntax error
+syntax error
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+stderr:
+input:
+1.3: syntax error, unexpected '+', expecting end of file
+  | (* *) + (*) + (*)
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -86986,12 +87350,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1389: cat stderr
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+./calc.at:1389:  $PREPARSER ./calc  input
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87001,31 +87361,51 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+stderr:
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1389:  $PREPARSER ./calc  input
+./calc.at:1374: cat stderr
+./calc.at:1375: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 = 2 = 3
+./calc.at:1391:  $PREPARSER ./calc  input
 ./calc.at:1387: cat stderr
-stderr:
-syntax error
 syntax error
 syntax error
 syntax error
-error: 4444 != 1
 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1387:  $PREPARSER ./calc  input
-syntax error
+./calc.at:1390: cat stderr
+1.7: syntax error
+stderr:
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 123
+input:
+./calc.at:1374:  $PREPARSER ./calc --num input
 syntax error
 syntax error
 syntax error
-error: 4444 != 1
+./calc.at:1375: cat stderr
 stderr:
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
 input:
+stderr:
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.7: syntax error
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1390:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87036,16 +87416,55 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+./calc.at:1391: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
+  | 1 + 2 * 3
+./calc.at:1375:  $PREPARSER ./calc --exp input
+input:
+input:
   | 1 + 2 * 3 + !- ++
-./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1387:  $PREPARSER ./calc  input
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
 stderr:
 ./calc.at:1389: cat stderr
+  | 1 + 2 * 3
+./calc.at:1374:  $PREPARSER ./calc --num input
+stderr:
+stderr:
+syntax error
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (!!) + (1 2) = 1
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1389:  $PREPARSER ./calc  input
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+stderr:
+stderr:
+syntax error
+./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+stderr:
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87055,21 +87474,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1387: cat stderr
+./calc.at:1391: cat stderr
 input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-memory exhausted
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+  | 
+  | +1
+./calc.at:1391:  $PREPARSER ./calc  input
+./calc.at:1374: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87080,24 +87491,8 @@
   }eg
 ' expout || exit 77
 stderr:
-memory exhausted
-stderr:
-./calc.at:1389: cat stderr
-stdout:
-./calc.at:1390: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1389:  $PREPARSER ./calc  input
+499. calc.at:1375:  ok
+2.1: syntax error
 ./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87108,38 +87503,49 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1390: cat stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1389:  $PREPARSER ./calc  input
 stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+./calc.at:1374: cat stderr
+2.1: syntax error
 stderr:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1387: cat stderr
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1390:  $PREPARSER ./calc  input
-syntax error
-syntax error
-error: 2222 != 1
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1387: cat stderr
+input:
 stderr:
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+  | 1 + 2 * 3
+./calc.at:1374:  $PREPARSER ./calc --exp input
+
+stderr:
+./calc.at:1391: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (#) + (#) = 2222
+stderr:
+  | 1 + 2 * 3 + !* ++
 ./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
+stderr:
+./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+memory exhausted
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
 ./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -87151,20 +87557,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 2
+memory exhausted
+./calc.at:1391: cat stderr
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1390:  $PREPARSER ./calc  input
+498. calc.at:1374:  ok
 stderr:
+./calc.at:1391:  $PREPARSER ./calc  /dev/null
 stderr:
-1.3: syntax error
 ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1389: cat stderr
+1.1: syntax error
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error
+506. calc.at:1394: testing Calculator %glr-parser %verbose  ...
+./calc.at:1389: cat stderr
+./calc.at:1394: mv calc.y.tmp calc.y
+
 ./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87175,7 +87583,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.1: syntax error
+stderr:
 input:
+
+  | 1 + 2 * 3 + !* ++
+./calc.at:1389:  $PREPARSER ./calc  input
+./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+memory exhausted
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87186,34 +87603,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (* *) + (*) + (*)
-./calc.at:1389:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1390: cat stderr
+./calc.at:1391: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1387: cat stderr
 stderr:
-syntax error
-syntax error
-syntax error
-input:
+memory exhausted
+./calc.at:1391: cat stderr
 input:
-  | 1//2
-./calc.at:1390:  $PREPARSER ./calc  input
-  | (1 + #) = 1111
+./calc.at:1390: cat stderr
+  | (#) + (#) = 2222
 ./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+input:
 ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error
-stderr:
+input:
 ./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87224,8 +87637,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | 1 + 2 * 3 + !* ++
+stderr:
+./calc.at:1390:  $PREPARSER ./calc  input
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 syntax error: invalid character: '#'
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+./calc.at:1391:  $PREPARSER ./calc  input
+507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose  ...
+stderr:
+./calc.at:1395: mv calc.y.tmp calc.y
+
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.14: memory exhausted
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1389: cat stderr
+stderr:
+stderr:
+1.14: memory exhausted
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87235,8 +87677,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1390: cat stderr
-./calc.at:1387: "$PERL" -pi -e 'use strict;
+./calc.at:1394: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87246,27 +87688,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1389: cat stderr
-input:
-  | error
-./calc.at:1390:  $PREPARSER ./calc  input
 input:
-stderr:
-1.1: syntax error
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !+ ++
+  | (#) + (#) = 2222
 ./calc.at:1389:  $PREPARSER ./calc  input
-./calc.at:1387: cat stderr
-stderr:
-stderr:
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.1: syntax error
-stderr:
-input:
-  | (# + 1) = 1111
-./calc.at:1387:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87276,33 +87701,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-syntax error: invalid character: '#'
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !- ++
-./calc.at:1389:  $PREPARSER ./calc  input
-stderr:
 stderr:
 syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1387: cat stderr
 ./calc.at:1390: cat stderr
 stderr:
 input:
-./calc.at:1387: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1 = 2 = 3
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+input:
+  | (1 + #) = 1111
+./calc.at:1387:  $PREPARSER ./calc  input
+./calc.at:1391: cat stderr
+  | (#) + (#) = 2222
 ./calc.at:1390:  $PREPARSER ./calc  input
 stderr:
-1.7: syntax error
+stderr:
+input:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1395: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+syntax error: invalid character: '#'
 ./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87313,20 +87735,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1387: cat stderr
-input:
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (!!) + (1 2) = 1
+stderr:
+./calc.at:1391:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1389: cat stderr
-  | (1 + # + 1) = 1111
-./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
 stderr:
-1.7: syntax error
 syntax error: invalid character: '#'
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-  | 1 + 2 * 3 + !* ++
+  | (1 + #) = 1111
 ./calc.at:1389:  $PREPARSER ./calc  input
 ./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -87338,8 +87762,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.11: syntax error
+1.1-16: error: 2222 != 1
 stderr:
-memory exhausted
+./calc.at:1391: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error: invalid character: '#'
 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -87352,12 +87788,15 @@
   }eg
 ' expout || exit 77
 stderr:
-memory exhausted
-./calc.at:1387: cat stderr
 ./calc.at:1390: cat stderr
+input:
+  | (1 + #) = 1111
+./calc.at:1390:  $PREPARSER ./calc  input
+stderr:
 stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
-input:
 ./calc.at:1392: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -87369,14 +87808,40 @@
         || /\t/
         )' calc.c
 
-  | (1 + 1) / (1 - 1)
-./calc.at:1387:  $PREPARSER ./calc  input
-stderr:
-error: null divisor
-./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+./calc.at:1387: cat stderr
 stderr:
-error: null divisor
+./calc.at:1391: cat stderr
+1.6: syntax error: invalid character: '#'
+input:
+  | (# + 1) = 1111
+./calc.at:1387:  $PREPARSER ./calc  input
 input:
+input:
+./calc.at:1390: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1389: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+syntax error: invalid character: '#'
+  | (- *) + (1 2) = 1
+./calc.at:1391:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -87391,7 +87856,26 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1392:  $PREPARSER ./calc  input
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1390: cat stderr
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+stderr:
+input:
+  | (# + 1) = 1111
+./calc.at:1390:  $PREPARSER ./calc  input
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87402,17 +87886,12 @@
   }eg
 ' expout || exit 77
 input:
+  | 1 2
+./calc.at:1392:  $PREPARSER ./calc  input
+./calc.at:1389: cat stderr
 stderr:
-  | 
-  | +1
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1390:  $PREPARSER ./calc  input
-stderr:
-stderr:
-input:
-2.1: syntax error
+1.2: syntax error: invalid character: '#'
 ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 2
 ./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87423,32 +87902,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1392:  $PREPARSER ./calc  input
-./calc.at:1389: cat stderr
+input:
+  | (# + 1) = 1111
+./calc.at:1389:  $PREPARSER ./calc  input
 stderr:
 stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
 ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stdout:
 stderr:
-./calc.at:1391: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
 syntax error
-2.1: syntax error
-input:
-  | (#) + (#) = 2222
-./calc.at:1389:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1387: cat stderr
 ./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87459,27 +87927,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1387: cat stderr
-syntax error: invalid character: '#'
 syntax error: invalid character: '#'
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1391: cat stderr
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-500. calc.at:1387: ./calc.at:1391:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+  | (1 + # + 1) = 1111
+./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87489,17 +87941,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1387:  $PREPARSER ./calc  input
+input:
 stderr:
- ok
 syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1391:  $PREPARSER ./calc  input
 ./calc.at:1390: cat stderr
 stderr:
-./calc.at:1390:  $PREPARSER ./calc  /dev/null
-stderr:
-./calc.at:1392: cat stderr
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87509,28 +87960,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.1: syntax error
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-
+./calc.at:1389: cat stderr
 stderr:
-  | 1 2
-./calc.at:1391:  $PREPARSER ./calc  input
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+input:
 stderr:
-1.1: syntax error
-1.3: syntax error
+  | (1 + # + 1) = 1111
+./calc.at:1390:  $PREPARSER ./calc  input
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
 input:
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1389: cat stderr
-  | 1//2
-./calc.at:1392:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1392: cat stderr
+  | (1 + # + 1) = 1111
+./calc.at:1389:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error
-syntax error
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87540,11 +87993,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-syntax error
-  | (1 + #) = 1111
-./calc.at:1389:  $PREPARSER ./calc  input
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87554,13 +88005,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+1.6: syntax error: invalid character: '#'
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1390: cat stderr
+  | 1//2
+./calc.at:1392:  $PREPARSER ./calc  input
 stderr:
 syntax error: invalid character: '#'
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87570,10 +88024,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
+stderr:
 ./calc.at:1391: cat stderr
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1390:  $PREPARSER ./calc  input
+syntax error
+./calc.at:1387: cat stderr
+input:
 ./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87584,36 +88039,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-input:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1//2
-./calc.at:1391:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./calc.at:1392: cat stderr
-1.3: syntax error
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose  ...
-./calc.at:1389: cat stderr
-input:
-1.3: syntax error
-./calc.at:1395: mv calc.y.tmp calc.y
-
-  | error
-./calc.at:1392:  $PREPARSER ./calc  input
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87623,70 +88049,39 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1391:  $PREPARSER ./calc  input
+./calc.at:1390: cat stderr
 input:
-./calc.at:1391: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+  | (1 + 1) / (1 - 1)
+./calc.at:1387:  $PREPARSER ./calc  input
 stderr:
-  | (# + 1) = 1111
-./calc.at:1389:  $PREPARSER ./calc  input
-syntax error
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1390: cat stderr
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: null divisor
+./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1390:  $PREPARSER ./calc  input
 stderr:
 stderr:
-syntax error
-./calc.at:1391: cat stderr
-syntax error: invalid character: '#'
+error: null divisor
+1.11-17: error: null divisor
+./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1389: cat stderr
+./calc.at:1392: cat stderr
+stderr:
 input:
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1390:  $PREPARSER ./calc  input
-./calc.at:1392: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1389: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
   | error
-./calc.at:1391:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1392:  $PREPARSER ./calc  input
 stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-1.1: syntax error
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1392: cat stderr
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+1.11-17: error: null divisor
+  | 1 + 2 * 3 + !- ++
+./calc.at:1391:  $PREPARSER ./calc  input
+syntax error
+./calc.at:1387: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87696,17 +88091,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.1: syntax error
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-./calc.at:1390: cat stderr
-./calc.at:1389: cat stderr
-  | 1 = 2 = 3
-./calc.at:1392:  $PREPARSER ./calc  input
 stderr:
 syntax error
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1389:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1390: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87716,19 +88109,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-stderr:
-syntax error
-  | (1 + # + 1) = 1111
-./calc.at:1389:  $PREPARSER ./calc  input
-stderr:
-input:
-syntax error: invalid character: '#'
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: null divisor
 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-  | (- *) + (1 2) = 1
-./calc.at:1390:  $PREPARSER ./calc  input
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1387: cat stderr
+error: null divisor
+./calc.at:1389: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87738,26 +88126,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-syntax error: invalid character: '#'
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1391: cat stderr
-./calc.at:1395: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-stderr:
-input:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-  | 1 = 2 = 3
-./calc.at:1391:  $PREPARSER ./calc  input
-./calc.at:1392: cat stderr
-stderr:
-1.7: syntax error
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+500. calc.at:1387:  ok
+./calc.at:1390: cat stderr
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87767,7 +88138,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87777,22 +88148,38 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1389: cat stderr
+502. calc.at:1390:  ok
+./calc.at:1392: cat stderr
+501. calc.at:1389:  ok
+./calc.at:1391: cat stderr
+
 input:
-stderr:
-  | 
-  | +1
+  | 1 = 2 = 3
+input:
+
 ./calc.at:1392:  $PREPARSER ./calc  input
-1.7: syntax error
+
+  | 1 + 2 * 3 + !* ++
+./calc.at:1391:  $PREPARSER ./calc  input
 stderr:
 syntax error
 ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1389: cat stderr
+1.14: memory exhausted
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 syntax error
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1389:  $PREPARSER ./calc  input
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+stderr:
+1.14: memory exhausted
+509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations  ...
+./calc.at:1398: mv calc.y.tmp calc.y
+
+stderr:
+./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+508. calc.at:1397: testing Calculator %glr-parser api.pure %locations  ...
+stdout:
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87802,15 +88189,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1390: cat stderr
-stderr:
-error: null divisor
-./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-./calc.at:1391: cat stderr
-error: null divisor
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+./calc.at:1397: mv calc.y.tmp calc.y
+
+./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1393: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87821,24 +88214,58 @@
   }eg
 ' expout || exit 77
 input:
-  | (* *) + (*) + (*)
-./calc.at:1390:  $PREPARSER ./calc  input
+510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose  ...
+./calc.at:1400: mv calc.y.tmp calc.y
+
+./calc.at:1392: cat stderr
+./calc.at:1391: cat stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1393:  $PREPARSER ./calc  input
+./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
   | 
   | +1
+./calc.at:1392:  $PREPARSER ./calc  input
+input:
+  | (#) + (#) = 2222
 ./calc.at:1391:  $PREPARSER ./calc  input
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1397: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 stderr:
-2.1: syntax error
+syntax error
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | 1 2
+./calc.at:1393:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-2.1: syntax error
-./calc.at:1392: cat stderr
-./calc.at:1389: "$PERL" -pi -e 'use strict;
+syntax error
+syntax error
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+stderr:
+syntax error
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87848,10 +88275,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+./calc.at:1398: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87861,8 +88286,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1392:  $PREPARSER ./calc  /dev/null
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1391: cat stderr
+./calc.at:1400: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87872,20 +88298,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1392: cat stderr
+./calc.at:1392:  $PREPARSER ./calc  /dev/null
+input:
+stderr:
+  | (1 + #) = 1111
+./calc.at:1391:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1389: cat stderr
-./calc.at:1391: cat stderr
 syntax error
 ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-501. calc.at:1389:  ok
+1.6: syntax error: invalid character: '#'
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1390: cat stderr
-./calc.at:1391:  $PREPARSER ./calc  /dev/null
+./calc.at:1393: cat stderr
 syntax error
 stderr:
+1.6: syntax error: invalid character: '#'
 input:
-1.1: syntax error
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
+./calc.at:1393:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87896,15 +88328,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1390:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-1.1: syntax error
-stderr:
-input:
+syntax error
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -87915,13 +88340,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 + 2 * 3 + !- ++
-./calc.at:1390:  $PREPARSER ./calc  input
 stderr:
+syntax error
 ./calc.at:1392: cat stderr
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1391: cat stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1392:  $PREPARSER ./calc  input
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87932,9 +88358,9 @@
   }eg
 ' expout || exit 77
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1392:  $PREPARSER ./calc  input
-./calc.at:1391: cat stderr
+  | (# + 1) = 1111
+./calc.at:1391:  $PREPARSER ./calc  input
+stderr:
 stderr:
 syntax error
 syntax error
@@ -87942,35 +88368,21 @@
 syntax error
 error: 4444 != 1
 ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1393: cat stderr
 stderr:
 syntax error
 syntax error
 syntax error
 syntax error
 error: 4444 != 1
-./calc.at:1390: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1391:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+1.2: syntax error: invalid character: '#'
 input:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-508. calc.at:1397: testing Calculator %glr-parser api.pure %locations  ...
-./calc.at:1397: mv calc.y.tmp calc.y
-
-./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+  | error
+./calc.at:1393:  $PREPARSER ./calc  input
+./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87980,10 +88392,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 + 2 * 3 + !* ++
-./calc.at:1390:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -87993,21 +88403,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.14: memory exhausted
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1391: cat stderr
+syntax error
 ./calc.at:1392: cat stderr
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1391:  $PREPARSER ./calc  input
 stderr:
-1.14: memory exhausted
 input:
-./calc.at:1391: cat stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (!!) + (1 2) = 1
 ./calc.at:1392:  $PREPARSER ./calc  input
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88017,23 +88428,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (!!) + (1 2) = 1
-./calc.at:1391:  $PREPARSER ./calc  input
 stderr:
 stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
 error: 2222 != 1
-./calc.at:1390: cat stderr
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-input:
-  | (#) + (#) = 2222
-./calc.at:1390:  $PREPARSER ./calc  input
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
 stderr:
+./calc.at:1393: cat stderr
+syntax error
+error: 2222 != 1
 ./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88044,9 +88448,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 = 2 = 3
+./calc.at:1393:  $PREPARSER ./calc  input
+stderr:
+syntax error
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1391: cat stderr
 ./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88058,12 +88466,16 @@
   }eg
 ' expout || exit 77
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1397: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-./calc.at:1391: cat stderr
+syntax error
+input:
 ./calc.at:1392: cat stderr
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+  | (1 + 1) / (1 - 1)
+./calc.at:1391:  $PREPARSER ./calc  input
+stderr:
+1.11-17: error: null divisor
+./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88073,29 +88485,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1391:  $PREPARSER ./calc  input
   | (- *) + (1 2) = 1
 ./calc.at:1392:  $PREPARSER ./calc  input
 stderr:
 stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1390: cat stderr
+1.11-17: error: null divisor
 syntax error
 syntax error
 error: 2222 != 1
 ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-stderr:
 ./calc.at:1391: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88109,26 +88508,10 @@
 syntax error
 syntax error
 error: 2222 != 1
-  | (1 + #) = 1111
-./calc.at:1390:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1392: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
+./calc.at:1393: cat stderr
 ./calc.at:1391: cat stderr
-1.6: syntax error: invalid character: '#'
 input:
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88138,30 +88521,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (* *) + (*) + (*)
-./calc.at:1391:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1393:  $PREPARSER ./calc  input
+503. calc.at:1391:  ok
 stderr:
+syntax error
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1392: cat stderr
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1390: cat stderr
-input:
 stderr:
+syntax error
+
+input:
   | (* *) + (*) + (*)
 ./calc.at:1392:  $PREPARSER ./calc  input
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
 stderr:
 syntax error
 syntax error
 syntax error
 ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88171,16 +88550,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 syntax error
 syntax error
 syntax error
-  | (# + 1) = 1111
-./calc.at:1390:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1391: cat stderr
+./calc.at:1393: cat stderr
+./calc.at:1393:  $PREPARSER ./calc  /dev/null
+511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure  ...
 stderr:
+./calc.at:1401: mv calc.y.tmp calc.y
+
 ./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88191,10 +88570,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.2: syntax error: invalid character: '#'
+syntax error
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+syntax error
+./calc.at:1392: cat stderr
 input:
+stderr:
   | 1 + 2 * 3 + !+ ++
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1392:  $PREPARSER ./calc  input
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88204,26 +88590,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1391:  $PREPARSER ./calc  input
-./calc.at:1392: cat stderr
-stderr:
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1390: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1392:  $PREPARSER ./calc  input
-stderr:
 stderr:
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-input:
-input:
-input:
 stdout:
-  | 1 + 2 * 3 + !- ++
-  | (1 + # + 1) = 1111
-./calc.at:1393: "$PERL" -ne '
+./calc.at:1394: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -88234,17 +88603,9 @@
         || /\t/
         )' calc.c
 
-./calc.at:1392:  $PREPARSER ./calc  input
-./calc.at:1390:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !- ++
-./calc.at:1391:  $PREPARSER ./calc  input
-stderr:
-stderr:
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 input:
-1.6: syntax error: invalid character: '#'
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -88258,18 +88619,41 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1393:  $PREPARSER ./calc  input
+./calc.at:1394:  $PREPARSER ./calc  input
+input:
+./calc.at:1393: cat stderr
 stderr:
+./calc.at:1401: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+input:
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
 ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1392:  $PREPARSER ./calc  input
 stderr:
 stderr:
-1.6: syntax error: invalid character: '#'
 stderr:
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
 input:
   | 1 2
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1394:  $PREPARSER ./calc  input
+stderr:
+syntax error
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88279,8 +88663,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393:  $PREPARSER ./calc  input
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+stderr:
+syntax error
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88290,8 +88675,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+./calc.at:1393: cat stderr
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88301,49 +88686,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1392: cat stderr
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1391: cat stderr
-syntax error
 input:
-./calc.at:1390: cat stderr
+syntax error
+error: 2222 != 1
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 + !* ++
-input:
 ./calc.at:1392:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !* ++
-stderr:
-./calc.at:1391:  $PREPARSER ./calc  input
-memory exhausted
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1393: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.14: memory exhausted
+./calc.at:1394: cat stderr
 stderr:
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 memory exhausted
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+error: 2222 != 1
 input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1390:  $PREPARSER ./calc  input
 stderr:
+  | 1//2
+./calc.at:1394:  $PREPARSER ./calc  input
 stderr:
-1.11-17: error: null divisor
-1.14: memory exhausted
-./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+memory exhausted
 stderr:
-1.11-17: error: null divisor
-./calc.at:1393: cat stderr
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88353,6 +88724,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1395: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
+stderr:
+syntax error
 ./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88364,11 +88748,24 @@
   }eg
 ' expout || exit 77
 input:
-  | 1//2
-./calc.at:1393:  $PREPARSER ./calc  input
-./calc.at:1392: cat stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1395:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1390: "$PERL" -pi -e 'use strict;
+./calc.at:1393: cat stderr
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88378,34 +88775,48 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1391: cat stderr
-input:
+./calc.at:1392: cat stderr
 stderr:
+input:
+input:
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1393:  $PREPARSER ./calc  input
   | (#) + (#) = 2222
 ./calc.at:1392:  $PREPARSER ./calc  input
-./calc.at:1390: cat stderr
-input:
-syntax error
+  | 1 2
+./calc.at:1395:  $PREPARSER ./calc  input
+./calc.at:1394: cat stderr
+stderr:
+stderr:
 stderr:
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
+syntax error, unexpected number
 ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (#) + (#) = 2222
-./calc.at:1391:  $PREPARSER ./calc  input
-502. calc.at:1390:  ok
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+error: 2222 != 1
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
 stderr:
+stderr:
+syntax error, unexpected number
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+error: 2222 != 1
+  | error
+./calc.at:1394:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88415,8 +88826,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88426,7 +88836,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88436,34 +88847,115 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stdout:
+./calc.at:1397: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c
+
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1397:  $PREPARSER ./calc  input
+./calc.at:1395: cat stderr
+input:
 ./calc.at:1393: cat stderr
-./calc.at:1391: cat stderr
+stderr:
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
+./calc.at:1395:  $PREPARSER ./calc  input
 ./calc.at:1392: cat stderr
+stderr:
+input:
 input:
+stderr:
+./calc.at:1394: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1394: cat stderr
   | (1 + #) = 1111
-  | error
 ./calc.at:1392:  $PREPARSER ./calc  input
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1397:  $PREPARSER ./calc  input
 input:
+stderr:
+  | (* *) + (*) + (*)
 ./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
-  | (1 + #) = 1111
-./calc.at:1391:  $PREPARSER ./calc  input
-syntax error: invalid character: '#'
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+  | 1 = 2 = 3
+./calc.at:1394:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+stderr:
+syntax error: invalid character: '#'
 syntax error
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+syntax error
 stderr:
-1.6: syntax error: invalid character: '#'
+1.3: syntax error
 stderr:
 syntax error
+syntax error
+syntax error
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 syntax error: invalid character: '#'
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+syntax error
+syntax error
+syntax error
+./calc.at:1397: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1395: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88473,6 +88965,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1395: cat stderr
 ./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88483,7 +88976,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1391: cat stderr
 ./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88494,28 +88986,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations  ...
-./calc.at:1398: mv calc.y.tmp calc.y
-
-./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1397: cat stderr
+input:
+./calc.at:1394: cat stderr
+  | error
+./calc.at:1395:  $PREPARSER ./calc  input
 input:
-  | (# + 1) = 1111
-./calc.at:1392: cat stderr
-./calc.at:1391:  $PREPARSER ./calc  input
 stderr:
+  | 1//2
+./calc.at:1397:  $PREPARSER ./calc  input
 ./calc.at:1393: cat stderr
-1.2: syntax error: invalid character: '#'
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1392: cat stderr
+syntax error, unexpected invalid token
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 input:
-1.2: syntax error: invalid character: '#'
+1.3: syntax error
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error, unexpected invalid token
+  | 
+  | +1
+./calc.at:1394:  $PREPARSER ./calc  input
+input:
+input:
+stderr:
+stderr:
+syntax error
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
   | (# + 1) = 1111
 ./calc.at:1392:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
-input:
-syntax error: invalid character: '#'
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88525,22 +89031,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 = 2 = 3
-./calc.at:1393:  $PREPARSER ./calc  input
-stderr:
 stderr:
-./calc.at:1391: cat stderr
-syntax error
 ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
 syntax error: invalid character: '#'
-input:
-./calc.at:1398: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-  | (1 + # + 1) = 1111
-./calc.at:1391:  $PREPARSER ./calc  input
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
 stderr:
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88550,7 +89047,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+stderr:
+./calc.at:1395: cat stderr
+./calc.at:1397: cat stderr
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1393:  $PREPARSER ./calc  input
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88560,14 +89064,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.6: syntax error: invalid character: '#'
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1392: cat stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1393: cat stderr
-input:
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88577,27 +89076,39 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (1 + # + 1) = 1111
-./calc.at:1392:  $PREPARSER ./calc  input
+./calc.at:1394: cat stderr
+input:
 input:
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 
-  | +1
+  | error
+./calc.at:1397:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
+./calc.at:1392: cat stderr
+./calc.at:1395:  $PREPARSER ./calc  input
+./calc.at:1394:  $PREPARSER ./calc  /dev/null
 stderr:
-./calc.at:1393:  $PREPARSER ./calc  input
-syntax error: invalid character: '#'
 stderr:
 syntax error
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (1 + # + 1) = 1111
+stderr:
+./calc.at:1392:  $PREPARSER ./calc  input
+1.1: syntax error
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1391: cat stderr
 syntax error
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1391:  $PREPARSER ./calc  input
-./calc.at:1392: "$PERL" -pi -e 'use strict;
+syntax error, unexpected '='
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.1: syntax error
+syntax error: invalid character: '#'
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88607,9 +89118,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.11-17: error: null divisor
-./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected '='
 ./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88620,10 +89129,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1392: cat stderr
-stderr:
-1.11-17: error: null divisor
-./calc.at:1391: "$PERL" -pi -e 'use strict;
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88633,24 +89139,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
-input:
-./calc.at:1393:  $PREPARSER ./calc  /dev/null
-  | (1 + 1) / (1 - 1)
-./calc.at:1392:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1391: cat stderr
-error: null divisor
-./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-503. calc.at:1391: stderr:
-stderr:
-error: null divisor
- ok
-syntax error
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88660,6 +89149,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1395: cat stderr
+./calc.at:1397: cat stderr
 ./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88670,31 +89161,70 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-
+input:
 ./calc.at:1393: cat stderr
+input:
+  | 
+  | +1
+./calc.at:1395:  $PREPARSER ./calc  input
+./calc.at:1394: cat stderr
+  | 1 = 2 = 3
+./calc.at:1397:  $PREPARSER ./calc  input
+stderr:
+syntax error, unexpected '+'
+stderr:
 ./calc.at:1392: cat stderr
-504. calc.at:1392:  ok
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 input:
+stderr:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1394:  $PREPARSER ./calc  input
+1.7: syntax error
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected '+'
+stderr:
+  | 1 + 2 * 3 + !* ++
+1.7: syntax error
 ./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
+input:
+stderr:
+./calc.at:1397: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+memory exhausted
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
 syntax error
 syntax error
 syntax error
 error: 4444 != 1
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1392:  $PREPARSER ./calc  input
+stderr:
+memory exhausted
+stderr:
+stderr:
+error: null divisor
+./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
 syntax error
 syntax error
 syntax error
 error: 4444 != 1
-510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose  ...
-./calc.at:1400: mv calc.y.tmp calc.y
-
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+error: null divisor
+./calc.at:1397: cat stderr
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88704,23 +89234,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1393: cat stderr
 input:
-511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure  ...
-./calc.at:1401: mv calc.y.tmp calc.y
-
-  | (!!) + (1 2) = 1
-./calc.at:1393:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-syntax error
-error: 2222 != 1
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+./calc.at:1395: cat stderr
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88730,21 +89246,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1393:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1400: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+./calc.at:1392: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88754,20 +89256,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
-./calc.at:1401: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-input:
-  | (* *) + (*) + (*)
-./calc.at:1393:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-syntax error
-syntax error
+  | 
+  | +1
+./calc.at:1397:  $PREPARSER ./calc  input
 ./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88778,20 +89269,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1393:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1393:  $PREPARSER ./calc  input
+2.1: syntax error
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1395:  $PREPARSER ./calc  /dev/null
 stderr:
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394: cat stderr
 stderr:
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+2.1: syntax error
+syntax error, unexpected end of input
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1393: cat stderr
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88801,16 +89290,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
+stderr:
 input:
-  | 1 + 2 * 3 + !* ++
+./calc.at:1392: cat stderr
+  | (!!) + (1 2) = 1
+./calc.at:1394:  $PREPARSER ./calc  input
+input:
+syntax error, unexpected end of input
+  | (#) + (#) = 2222
 ./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
-memory exhausted
+syntax error
+error: 2222 != 1
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+syntax error
+error: 2222 != 1
+504. calc.at:1392:  ok
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1397: cat stderr
 stderr:
-memory exhausted
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1397:  $PREPARSER ./calc  /dev/null
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88820,18 +89326,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1393:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88841,15 +89336,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+1.1: syntax error
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.1: syntax error
+./calc.at:1395: cat stderr
+./calc.at:1394: cat stderr
 stderr:
-syntax error: invalid character: '#'
 ./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -88860,16 +89355,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
 input:
-  | (# + 1) = 1111
-./calc.at:1393:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1395:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+./calc.at:1400: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88879,16 +89381,74 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1394:  $PREPARSER ./calc  input
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error
+syntax error
+error: 2222 != 1
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1393: cat stderr
 input:
-  | (1 + # + 1) = 1111
+  | (1 + #) = 1111
 ./calc.at:1393:  $PREPARSER ./calc  input
+./calc.at:1397: cat stderr
+input:
+stderr:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+stderr:
+./calc.at:1400:  $PREPARSER ./calc  input
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
+./calc.at:1397:  $PREPARSER ./calc  input
 stderr:
+syntax error
+syntax error
+error: 2222 != 1
 syntax error: invalid character: '#'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
 ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose  ...
+./calc.at:1402: mv calc.y.tmp calc.y
+
+./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+input:
 stderr:
 syntax error: invalid character: '#'
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88898,16 +89458,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
-error: null divisor
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1400:  $PREPARSER ./calc  input
 stderr:
-error: null divisor
-./calc.at:1393: "$PERL" -pi -e 'use strict;
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88917,17 +89472,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1393: cat stderr
-505. calc.at:1393:  ok
-
-512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose  ...
-./calc.at:1402: mv calc.y.tmp calc.y
-
-./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1402: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 stdout:
-./calc.at:1394: "$PERL" -ne '
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1398: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -88938,6 +89492,7 @@
         || /\t/
         )' calc.c
 
+./calc.at:1395: cat stderr
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -88952,19 +89507,8 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1394:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-  | 1 2
-./calc.at:1394:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1394: "$PERL" -pi -e 'use strict;
+./calc.at:1398:  $PREPARSER ./calc  input
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -88974,35 +89518,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: cat stderr
-input:
-  | 1//2
-./calc.at:1394:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-./calc.at:1394: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1394: cat stderr
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | error
-./calc.at:1394:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1394: "$PERL" -pi -e 'use strict;
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89012,79 +89535,64 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: cat stderr
+./calc.at:1400: cat stderr
+  | (!!) + (1 2) = 1
+./calc.at:1395:  $PREPARSER ./calc  input
+./calc.at:1397: cat stderr
+stderr:
 input:
-  | 1 = 2 = 3
+input:
+input:
+  | (* *) + (*) + (*)
 ./calc.at:1394:  $PREPARSER ./calc  input
+  | 1//2
+./calc.at:1400:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1393: cat stderr
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-./calc.at:1394: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1394: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1394:  $PREPARSER ./calc  input
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 syntax error
-./calc.at:1394: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1394: cat stderr
-./calc.at:1394:  $PREPARSER ./calc  /dev/null
-stderr:
 syntax error
 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 stderr:
-syntax error
-./calc.at:1394: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1394: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1394:  $PREPARSER ./calc  input
+  | 1 2
 stderr:
+input:
+./calc.at:1398:  $PREPARSER ./calc  input
+syntax error, unexpected number
+error: 2222 != 1
+  | (!!) + (1 2) = 1
 syntax error
 syntax error
 syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1397:  $PREPARSER ./calc  input
 stderr:
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
+./calc.at:1402: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+input:
+1.3: syntax error, unexpected number
+./calc.at:1400: cat stderr
+stderr:
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (# + 1) = 1111
+./calc.at:1393:  $PREPARSER ./calc  input
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.3: syntax error, unexpected number
+syntax error: invalid character: '#'
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error: invalid character: '#'
+input:
 ./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89095,18 +89603,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1394:  $PREPARSER ./calc  input
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1394: "$PERL" -pi -e 'use strict;
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89116,20 +89613,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1394:  $PREPARSER ./calc  input
-stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1394: "$PERL" -pi -e 'use strict;
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89139,20 +89623,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | error
+./calc.at:1400:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1394: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1394:  $PREPARSER ./calc  input
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1398: cat stderr
+./calc.at:1395: cat stderr
+1.11: syntax error
+1.1-16: error: 2222 != 1
+input:
 stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1394: "$PERL" -pi -e 'use strict;
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89162,22 +89646,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: cat stderr
 input:
   | 1 + 2 * 3 + !+ ++
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+  | (- *) + (1 2) = 1
+./calc.at:1395:  $PREPARSER ./calc  input
 ./calc.at:1394:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1394:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-stdout:
-./calc.at:1394: "$PERL" -pi -e 'use strict;
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89187,52 +89665,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1395:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1394: cat stderr
-input:
-  | 1 2
-./calc.at:1395:  $PREPARSER ./calc  input
-stderr:
-input:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 syntax error, unexpected number
+error: 2222 != 1
 ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !* ++
-./calc.at:1394:  $PREPARSER ./calc  input
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
 stderr:
+./calc.at:1398:  $PREPARSER ./calc  input
+./calc.at:1400: cat stderr
 stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 syntax error, unexpected number
-memory exhausted
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: 2222 != 1
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-memory exhausted
+./calc.at:1393: cat stderr
 ./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89243,48 +89692,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1395: cat stderr
-./calc.at:1394: cat stderr
+./calc.at:1397: cat stderr
 input:
+stderr:
 input:
-  | 1//2
-./calc.at:1395:  $PREPARSER ./calc  input
-  | (#) + (#) = 2222
+  | 1 + 2 * 3 + !- ++
+  | (- *) + (1 2) = 1
 ./calc.at:1394:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1397:  $PREPARSER ./calc  input
+input:
 stderr:
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1395: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+input:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1400:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
+./calc.at:1393:  $PREPARSER ./calc  input
 ./calc.at:1395: cat stderr
-./calc.at:1394: "$PERL" -pi -e 'use strict;
+stderr:
+stderr:
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89294,23 +89730,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: cat stderr
+stderr:
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1398: cat stderr
+./calc.at:1400: cat stderr
 input:
   | error
-./calc.at:1395:  $PREPARSER ./calc  input
+./calc.at:1398:  $PREPARSER ./calc  input
 input:
-  | (1 + #) = 1111
-./calc.at:1394:  $PREPARSER ./calc  input
+stderr:
+  | 
+  | +1
+1.1: syntax error, unexpected invalid token
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1400:  $PREPARSER ./calc  input
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
 stderr:
 stderr:
-syntax error, unexpected invalid token
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error: invalid character: '#'
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error, unexpected invalid token
 syntax error: invalid character: '#'
-stderr:
-syntax error, unexpected invalid token
 ./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89321,7 +89767,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89331,24 +89777,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: cat stderr
-./calc.at:1394: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1395:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '='
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (# + 1) = 1111
-./calc.at:1394:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '='
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394: cat stderr
 stderr:
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+  | (* *) + (*) + (*)
+./calc.at:1395:  $PREPARSER ./calc  input
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89358,10 +89792,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error: invalid character: '#'
-./calc.at:1395: cat stderr
-input:
-./calc.at:1394: "$PERL" -pi -e 'use strict;
+./calc.at:1397: cat stderr
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89371,19 +89803,47 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 
-  | +1
-./calc.at:1395:  $PREPARSER ./calc  input
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
 stderr:
-syntax error, unexpected '+'
+./calc.at:1398: cat stderr
+input:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+  | 1 + 2 * 3 + !* ++
+./calc.at:1400: cat stderr
 ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394:  $PREPARSER ./calc  input
+stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 stderr:
-./calc.at:1394: cat stderr
-syntax error, unexpected '+'
 input:
-  | (1 + # + 1) = 1111
-./calc.at:1394:  $PREPARSER ./calc  input
+memory exhausted
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1398:  $PREPARSER ./calc  input
+./calc.at:1400:  $PREPARSER ./calc  /dev/null
+stderr:
+input:
+./calc.at:1393: cat stderr
+stderr:
+memory exhausted
+  | (* *) + (*) + (*)
+./calc.at:1397:  $PREPARSER ./calc  input
+stderr:
+stderr:
+1.7: syntax error, unexpected '='
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
 ./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89394,28 +89854,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error: invalid character: '#'
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1395: cat stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1393:  $PREPARSER ./calc  input
 stderr:
-stdout:
-syntax error: invalid character: '#'
-./calc.at:1397: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-./calc.at:1395:  $PREPARSER ./calc  /dev/null
 stderr:
-syntax error, unexpected end of input
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89426,28 +89868,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.7: syntax error, unexpected '='
 stderr:
-input:
-syntax error, unexpected end of input
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1397:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: null divisor
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1395: cat stderr
 ./calc.at:1394: cat stderr
-stderr:
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1400: cat stderr
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89457,49 +89889,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1394:  $PREPARSER ./calc  input
-input:
 stderr:
 error: null divisor
-./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1395: cat stderr
-  | 1 2
-./calc.at:1397:  $PREPARSER ./calc  input
-stderr:
 input:
-stderr:
+input:
+  | (#) + (#) = 2222
+./calc.at:1394:  $PREPARSER ./calc  input
+input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-error: null divisor
-./calc.at:1395:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-stderr:
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.3: syntax error
+./calc.at:1400:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !+ ++
 stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-./calc.at:1394: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1395:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89510,8 +89913,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1398: cat stderr
+stderr:
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+stderr:
+input:
+stderr:
+  | 
+  | +1
+./calc.at:1398:  $PREPARSER ./calc  input
+stderr:
+2.1: syntax error, unexpected '+'
 ./calc.at:1397: cat stderr
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+input:
+./calc.at:1393: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89521,29 +89953,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1394: cat stderr
-input:
-  | 1//2
-./calc.at:1397:  $PREPARSER ./calc  input
-./calc.at:1395: cat stderr
+./calc.at:1400: cat stderr
 stderr:
-506. calc.at:1394:  ok
-1.3: syntax error
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (!!) + (1 2) = 1
-stderr:
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1395:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !+ ++
+2.1: syntax error, unexpected '+'
+./calc.at:1397:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+./calc.at:1393: cat stderr
+input:
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89553,7 +89975,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (!!) + (1 2) = 1
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1400:  $PREPARSER ./calc  input
+stderr:
+stderr:
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394: cat stderr
+505. calc.at:1393: ./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89563,28 +89996,32 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1397: cat stderr
-./calc.at:1395: cat stderr
+ ok
+stderr:
 input:
-  | error
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1397:  $PREPARSER ./calc  input
+stderr:
 input:
-  | (- *) + (1 2) = 1
+  | (1 + #) = 1111
+./calc.at:1400: cat stderr
+./calc.at:1394:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1395:  $PREPARSER ./calc  input
-1.1: syntax error
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose  ...
-./calc.at:1403: mv calc.y.tmp calc.y
+syntax error: invalid character: '#'
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 
 stderr:
+./calc.at:1398: cat stderr
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+./calc.at:1398:  $PREPARSER ./calc  /dev/null
+input:
 stderr:
-1.1: syntax error
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+  | (- *) + (1 2) = 1
+./calc.at:1400:  $PREPARSER ./calc  input
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89595,12 +90032,21 @@
   }eg
 ' expout || exit 77
 stderr:
-./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1397: cat stderr
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+1.1: syntax error, unexpected end of input
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1395: cat stderr
+stderr:
+stderr:
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+1.1: syntax error, unexpected end of input
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89610,28 +90056,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1397:  $PREPARSER ./calc  input
-stderr:
 input:
-1.7: syntax error
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (* *) + (*) + (*)
+  | 1 + 2 * 3 + !* ++
 ./calc.at:1395:  $PREPARSER ./calc  input
+./calc.at:1400: cat stderr
 stderr:
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+memory exhausted
 ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.7: syntax error
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89641,7 +90073,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89651,26 +90083,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: cat stderr
-./calc.at:1397: cat stderr
-input:
+513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose  ...
+./calc.at:1403: mv calc.y.tmp calc.y
+
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1395:  $PREPARSER ./calc  input
-  | 
-  | +1
-./calc.at:1397:  $PREPARSER ./calc  input
-stderr:
 stderr:
-2.1: syntax error
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1400:  $PREPARSER ./calc  input
+memory exhausted
 stderr:
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-2.1: syntax error
+./calc.at:1398: cat stderr
+./calc.at:1394: cat stderr
+./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1397: cat stderr
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 input:
-./calc.at:1403: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1398:  $PREPARSER ./calc  input
+input:
+input:
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89680,33 +90119,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 + 2 * 3 + !- ++
-./calc.at:1395:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !* ++
 stderr:
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1397: cat stderr
+./calc.at:1397:  $PREPARSER ./calc  input
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (# + 1) = 1111
+./calc.at:1400: cat stderr
 stderr:
-./calc.at:1397:  $PREPARSER ./calc  /dev/null
+./calc.at:1394:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+1.14: memory exhausted
 ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-stdout:
-./types.at:139:  $PREPARSER ./test
-1.1: syntax error
-stderr:
-./calc.at:1395: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89716,36 +90152,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1395: cat stderr
-452. types.at:139:  ok
+syntax error: invalid character: '#'
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1400:  $PREPARSER ./calc  input
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1398: cat stderr
+stderr:
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1395:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !- ++
+./calc.at:1400:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
 stderr:
-./calc.at:1397: cat stderr
-memory exhausted
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.14: memory exhausted
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1397:  $PREPARSER ./calc  input
-
-memory exhausted
+input:
+  | (#) + (#) = 2222
+./calc.at:1395:  $PREPARSER ./calc  input
+  | (!!) + (1 2) = 1
+./calc.at:1398:  $PREPARSER ./calc  input
+./calc.at:1403: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89765,30 +90200,29 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1395: cat stderr
-./calc.at:1397: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1395:  $PREPARSER ./calc  input
-stderr:
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
-input:
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (!!) + (1 2) = 1
-./calc.at:1397:  $PREPARSER ./calc  input
-514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose  ...
-./calc.at:1405: mv calc.y.tmp calc.y
-
+./calc.at:1400: cat stderr
+stderr:
 stderr:
+input:
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
-stderr:
-1.11: syntax error
+1.11: syntax error, unexpected number
 1.1-16: error: 2222 != 1
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+  | 1 + 2 * 3 + !* ++
+./calc.at:1400:  $PREPARSER ./calc  input
+./calc.at:1394: cat stderr
+./calc.at:1397: cat stderr
+stderr:
+1.14: memory exhausted
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.14: memory exhausted
 ./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89799,10 +90233,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1395: cat stderr
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89812,52 +90244,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1400: cat stderr
+./calc.at:1398: cat stderr
+input:
+  | (#) + (#) = 2222
+./calc.at:1397:  $PREPARSER ./calc  input
+./calc.at:1395: cat stderr
+input:
+stderr:
+  | (1 + # + 1) = 1111
+./calc.at:1394:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (#) + (#) = 2222
+./calc.at:1400:  $PREPARSER ./calc  input
+stderr:
+stderr:
+  | (- *) + (1 2) = 1
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | (1 + #) = 1111
 ./calc.at:1395:  $PREPARSER ./calc  input
-./calc.at:1397: cat stderr
 stderr:
 syntax error: invalid character: '#'
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-./calc.at:1405: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-  | (- *) + (1 2) = 1
-./calc.at:1397:  $PREPARSER ./calc  input
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error: invalid character: '#'
+./calc.at:1398:  $PREPARSER ./calc  input
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
 stderr:
-stdout:
-./calc.at:1395: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1398: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c
-
-./calc.at:1395: cat stderr
 ./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89868,52 +90290,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1398:  $PREPARSER ./calc  input
-  | (# + 1) = 1111
-./calc.at:1395:  $PREPARSER ./calc  input
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 stderr:
 stderr:
-./calc.at:1397: cat stderr
 syntax error: invalid character: '#'
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
 stderr:
 syntax error: invalid character: '#'
+./calc.at:1397: cat stderr
+./calc.at:1400: cat stderr
 input:
-stderr:
-  | (* *) + (*) + (*)
+  | (1 + #) = 1111
 ./calc.at:1397:  $PREPARSER ./calc  input
-input:
-stderr:
-  | 1 2
-./calc.at:1398:  $PREPARSER ./calc  input
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error, unexpected number
-stderr:
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-stderr:
 ./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -89924,8 +90319,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.3: syntax error, unexpected number
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89935,8 +90330,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
 ./calc.at:1395: cat stderr
-./calc.at:1398: "$PERL" -pi -e 'use strict;
+  | (1 + #) = 1111
+./calc.at:1400:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+stderr:
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89947,40 +90350,17 @@
   }eg
 ' expout || exit 77
 ./calc.at:1398: cat stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-./calc.at:1397: cat stderr
-  | (1 + # + 1) = 1111
+input:
+  | (# + 1) = 1111
 ./calc.at:1395:  $PREPARSER ./calc  input
 stderr:
-input:
-  | 1//2
-./calc.at:1398:  $PREPARSER ./calc  input
 syntax error: invalid character: '#'
 ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1397:  $PREPARSER ./calc  input
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-syntax error: invalid character: '#'
-stderr:
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-stderr:
-./calc.at:1398: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1395: "$PERL" -pi -e 'use strict;
+./calc.at:1394: cat stderr
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -89990,40 +90370,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1397:  $PREPARSER ./calc  input
-./calc.at:1398: cat stderr
-stderr:
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1395: cat stderr
-input:
 stderr:
-  | error
-./calc.at:1398:  $PREPARSER ./calc  input
 stderr:
 input:
-1.1: syntax error, unexpected invalid token
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+1.6: syntax error: invalid character: '#'
+  | (* *) + (*) + (*)
+./calc.at:1398:  $PREPARSER ./calc  input
   | (1 + 1) / (1 - 1)
-./calc.at:1395:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1400: cat stderr
+./calc.at:1394:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1397: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.1: syntax error, unexpected invalid token
 error: null divisor
-./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1398: "$PERL" -pi -e 'use strict;
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90033,38 +90394,39 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-error: null divisor
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1397: cat stderr
-./calc.at:1398: cat stderr
+./calc.at:1395: cat stderr
+error: null divisor
 input:
+stderr:
 input:
-./calc.at:1395: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1 + 2 * 3 + !* ++
+  | (# + 1) = 1111
+./calc.at:1400:  $PREPARSER ./calc  input
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+  | (# + 1) = 1111
 ./calc.at:1397:  $PREPARSER ./calc  input
 stderr:
-  | 1 = 2 = 3
-./calc.at:1398:  $PREPARSER ./calc  input
-1.14: memory exhausted
+stderr:
+input:
+1.2: syntax error: invalid character: '#'
 ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + # + 1) = 1111
+./calc.at:1395:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1395: cat stderr
-1.7: syntax error, unexpected '='
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.14: memory exhausted
 stderr:
-507. calc.at:1395:  ok
-1.7: syntax error, unexpected '='
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1394: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90084,28 +90446,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1397: cat stderr
-./calc.at:1398: cat stderr
-
-input:
-  | (#) + (#) = 2222
-./calc.at:1397:  $PREPARSER ./calc  input
-input:
-stderr:
-  | 
-  | +1
-./calc.at:1398:  $PREPARSER ./calc  input
 1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-2.1: syntax error, unexpected '+'
-stderr:
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-stderr:
-2.1: syntax error, unexpected '+'
 ./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -90116,7 +90458,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1398: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1400: cat stderr
+syntax error: invalid character: '#'
+./calc.at:1398: cat stderr
+./calc.at:1394: cat stderr
+input:
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90126,66 +90474,46 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1398: cat stderr
-./calc.at:1398:  $PREPARSER ./calc  /dev/null
+506. calc.at:1394:  ok
+input:
 ./calc.at:1397: cat stderr
+  | (1 + # + 1) = 1111
+./calc.at:1395: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1398:  $PREPARSER ./calc  input
+./calc.at:1400:  $PREPARSER ./calc  input
 stderr:
-515. calc.at:1407: testing Calculator %glr-parser %debug  ...
-./calc.at:1407: mv calc.y.tmp calc.y
-
-1.1: syntax error, unexpected end of input
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-stderr:
-1.1: syntax error, unexpected end of input
 input:
-  | (1 + #) = 1111
+input:
+1.6: syntax error: invalid character: '#'
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1395:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
+stderr:
+stderr:
 ./calc.at:1397:  $PREPARSER ./calc  input
+
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.6: syntax error: invalid character: '#'
 ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1398: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
+error: null divisor
+./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.6: syntax error: invalid character: '#'
-./calc.at:1398: cat stderr
-./calc.at:1397: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+stderr:
+error: null divisor
+stderr:
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+1.6: syntax error: invalid character: '#'
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1398:  $PREPARSER ./calc  input
+./calc.at:1400: cat stderr
 stderr:
-./calc.at:1397: cat stderr
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
 ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-input:
-./calc.at:1398: "$PERL" -pi -e 'use strict;
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90195,18 +90523,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (# + 1) = 1111
-./calc.at:1397:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1398: cat stderr
-1.2: syntax error: invalid character: '#'
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-stdout:
-./calc.at:1407: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+./calc.at:1395: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90217,45 +90535,15 @@
   }eg
 ' expout || exit 77
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1398:  $PREPARSER ./calc  input
+  | (1 + 1) / (1 - 1)
+./calc.at:1400:  $PREPARSER ./calc  input
+./calc.at:1395: cat stderr
 stderr:
-./calc.at:1401: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
+1.11-17: error: null divisor
+./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose  ...
+./calc.at:1405: mv calc.y.tmp calc.y
 
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-stderr:
-./calc.at:1401:  $PREPARSER ./calc  input
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-stderr:
-./calc.at:1397: cat stderr
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
 ./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -90267,9 +90555,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 stdout:
-input:
-./calc.at:1400: "$PERL" -ne '
+507. calc.at:1395:  ok
+1.11-17: error: null divisor
+./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1397: cat stderr
+./calc.at:1401: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -90280,18 +90572,17 @@
         || /\t/
         )' calc.c calc.h
 
-  | 1 2
-./calc.at:1401:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
+./calc.at:1398: cat stderr
 input:
-  | (1 + # + 1) = 1111
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+  | (1 + 1) / (1 - 1)
 ./calc.at:1397:  $PREPARSER ./calc  input
+
 stderr:
+./calc.at:1400: cat stderr
+input:
+1.11-17: error: null divisor
+./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -90305,34 +90596,23 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1398: cat stderr
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | (- *) + (1 2) = 1
+./calc.at:1401:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !* ++
 ./calc.at:1398:  $PREPARSER ./calc  input
-./calc.at:1401: cat stderr
-1.6: syntax error: invalid character: '#'
 stderr:
 stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
+510. calc.at:1400:  ok
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.11-17: error: null divisor
+1.14: memory exhausted
 ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1//2
-./calc.at:1401:  $PREPARSER ./calc  input
 stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
+./calc.at:1405: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 stderr:
+1.14: memory exhausted
 input:
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90343,15 +90623,8 @@
   }eg
 ' expout || exit 77
   | 1 2
-./calc.at:1400:  $PREPARSER ./calc  input
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1398: "$PERL" -pi -e 'use strict;
+./calc.at:1401:  $PREPARSER ./calc  input
+./calc.at:1397: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90361,47 +90634,38 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1397: cat stderr
+515. calc.at:1407: testing Calculator %glr-parser %debug  ...
 stderr:
+./calc.at:1407: mv calc.y.tmp calc.y
+
+
+./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1398: cat stderr
-./calc.at:1401: cat stderr
-input:
+stderr:
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1397:  $PREPARSER ./calc  input
-  | (* *) + (*) + (*)
+./calc.at:1397: cat stderr
+  | (#) + (#) = 2222
 ./calc.at:1398:  $PREPARSER ./calc  input
-./calc.at:1400: cat stderr
-input:
-stderr:
 stderr:
-  | error
-./calc.at:1401:  $PREPARSER ./calc  input
-1.11-17: error: null divisor
-./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+508. calc.at:1397:  ok
+./calc.at:1401: cat stderr
+516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose  ...
+./calc.at:1408: mv calc.y.tmp calc.y
+
 stderr:
-stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1407: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c $LIBS
 input:
-stderr:
   | 1//2
-./calc.at:1400:  $PREPARSER ./calc  input
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.11-17: error: null divisor
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-stderr:
-stderr:
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1401: cat stderr
+./calc.at:1401:  $PREPARSER ./calc  input
 ./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -90412,9 +90676,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+
 stderr:
 1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1397: "$PERL" -pi -e 'use strict;
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1398: cat stderr
+stderr:
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+input:
+  | (1 + #) = 1111
+./calc.at:1398:  $PREPARSER ./calc  input
+./calc.at:1408: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1401: cat stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90424,45 +90703,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1398: cat stderr
 input:
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1398:  $PREPARSER ./calc  input
-  | 1 = 2 = 3
+./calc.at:1398: cat stderr
+  | error
 ./calc.at:1401:  $PREPARSER ./calc  input
-./calc.at:1400: cat stderr
-stderr:
-stderr:
-./calc.at:1397: cat stderr
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-508. calc.at:1397:  ok
 input:
-input:
-  | error
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
-  | 1 + 2 * 3 + !- ++
+  | (# + 1) = 1111
 ./calc.at:1398:  $PREPARSER ./calc  input
-stderr:
 1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1401: cat stderr
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose  ...
 stderr:
+./calc.at:1409: mv calc.y.tmp calc.y
 
 1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1401: cat stderr
 input:
-  | 
-  | +1
+stderr:
+  | 1 = 2 = 3
 ./calc.at:1401:  $PREPARSER ./calc  input
-./calc.at:1400: cat stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
 ./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -90473,40 +90740,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
 ./calc.at:1398: cat stderr
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-  | 1 = 2 = 3
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
 1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | 1 + 2 * 3 + !* ++
-stderr:
+  | (1 + # + 1) = 1111
 ./calc.at:1398:  $PREPARSER ./calc  input
 ./calc.at:1401: cat stderr
 stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-./calc.at:1401:  $PREPARSER ./calc  /dev/null
-1.14: memory exhausted
+1.6: syntax error: invalid character: '#'
 ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1400: cat stderr
-stderr:
-1.14: memory exhausted
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 input:
   | 
   | +1
-./calc.at:1400:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1401:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1409: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 ./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -90518,50 +90772,24 @@
   }eg
 ' expout || exit 77
 stderr:
-516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose  ...
-./calc.at:1408: mv calc.y.tmp calc.y
-
-./calc.at:1401: cat stderr
 2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1398: cat stderr
-./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-stderr:
-input:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
 input:
-  | (#) + (#) = 2222
+./calc.at:1401: cat stderr
+  | (1 + 1) / (1 - 1)
 ./calc.at:1398:  $PREPARSER ./calc  input
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1401:  $PREPARSER ./calc  input
+./calc.at:1401:  $PREPARSER ./calc  /dev/null
 stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1400: cat stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+1.11-17: error: null divisor
 ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1400:  $PREPARSER ./calc  /dev/null
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
 1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-stderr:
+1.11-17: error: null divisor
 ./calc.at:1401: cat stderr
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1398: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -90573,300 +90801,103 @@
   }eg
 ' expout || exit 77
 input:
-./calc.at:1400: cat stderr
-  | (!!) + (1 2) = 1
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1401:  $PREPARSER ./calc  input
-stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1398: cat stderr
-input:
-stderr:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1400:  $PREPARSER ./calc  input
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-input:
 stderr:
-  | (1 + #) = 1111
-./calc.at:1398:  $PREPARSER ./calc  input
 1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.1-46: error: 4444 != 1
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1401: cat stderr
-stderr:
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+509. calc.at:1398:  ok
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.1-46: error: 4444 != 1
+./calc.at:1401: cat stderr
 input:
-stderr:
-  | (- *) + (1 2) = 1
-./calc.at:1401:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
-stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-./calc.at:1400: cat stderr
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1408: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./calc.at:1398: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
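The "$PERL" -pi block above, which recurs after nearly every test step in both logs, rewrites the expected-output file expout in place: it turns this test group's custom diagnostic form, "syntax error on token [X] (expected: [A] [B] ...)", into Bison's conventional wording, "syntax error, unexpected X, expecting A or B ...", keeping the "expecting" list only when there are two to four expected tokens and dropping it otherwise. That lets a single expout serve parsers built with different error-message styles. A standalone sketch of the same substitution, run on two sample lines taken from this log (illustration only, not part of the suite):

  use strict;
  use warnings;

  my @samples = (
    "1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])",
    "1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\\n'])",
  );

  for my $line (@samples) {
    (my $out = $line) =~ s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
    {
      my $unexp = $1;
      my @exps  = $2 =~ /\[(.*?)\]/g;   # the bracketed "expected" tokens
      ($#exps && $#exps < 4)            # 2..4 expected tokens: keep the list
        ? "syntax error, unexpected $unexp, expecting @{[join(' or ', @exps)]}"
        : "syntax error, unexpected $unexp";
    }eg;
    print "$out\n";
  }

  # Prints:
  #   1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
  #   1.3: syntax error, unexpected number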
-input:
-stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
+
   | (!!) + (1 2) = 1
-./calc.at:1400:  $PREPARSER ./calc  input
+./calc.at:1401:  $PREPARSER ./calc  input
 stderr:
 1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
 1.1-16: error: 2222 != 1
-./calc.at:1398: cat stderr
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
 1.1-16: error: 2222 != 1
 ./calc.at:1401: cat stderr
+518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose  ...
 input:
-input:
-./calc.at:1400: cat stderr
-  | (# + 1) = 1111
-./calc.at:1398:  $PREPARSER ./calc  input
-  | (* *) + (*) + (*)
-./calc.at:1401:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
+./calc.at:1411: mv calc.y.tmp calc.y
+
   | (- *) + (1 2) = 1
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1401:  $PREPARSER ./calc  input
 stderr:
 1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
 1.1-17: error: 2222 != 1
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1401: cat stderr
-./calc.at:1398: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
 1.1-17: error: 2222 != 1
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1401:  $PREPARSER ./calc  input
-./calc.at:1400: cat stderr
-./calc.at:1398: cat stderr
-stderr:
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
+./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+./calc.at:1401: cat stderr
 input:
   | (* *) + (*) + (*)
-input:
-./calc.at:1400:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !- ++
 ./calc.at:1401:  $PREPARSER ./calc  input
-  | (1 + # + 1) = 1111
-./calc.at:1398:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
 1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.6: syntax error: invalid character: '#'
 ./calc.at:1401: cat stderr
-./calc.at:1400: cat stderr
-./calc.at:1398: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 input:
-input:
-./calc.at:1398: cat stderr
-  | 1 + 2 * 3 + !* ++
-./calc.at:1401:  $PREPARSER ./calc  input
   | 1 + 2 * 3 + !+ ++
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1401:  $PREPARSER ./calc  input
 stderr:
-1.14: memory exhausted
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1411: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1398:  $PREPARSER ./calc  input
-stderr:
 stderr:
-stderr:
-1.14: memory exhausted
 input:
-1.11-17: error: null divisor
-./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 + !- ++
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./calc.at:1401: cat stderr
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.11-17: error: null divisor
-stderr:
-input:
-  | (#) + (#) = 2222
 ./calc.at:1401:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1400: cat stderr
-./calc.at:1398: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1398: cat stderr
-input:
-509. calc.at:1398:  ok
 ./calc.at:1401: cat stderr
-  | 1 + 2 * 3 + !* ++
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
-1.14: memory exhausted
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (1 + #) = 1111
-stderr:
+  | 1 + 2 * 3 + !* ++
 ./calc.at:1401:  $PREPARSER ./calc  input
-
 stderr:
 1.14: memory exhausted
-1.6: syntax error: invalid character: '#'
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1400: cat stderr
-1.6: syntax error: invalid character: '#'
-input:
-./calc.at:1401: cat stderr
-  | (#) + (#) = 2222
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1401:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-1.2: syntax error: invalid character: '#'
-./calc.at:1401: cat stderr
-./calc.at:1400: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1401:  $PREPARSER ./calc  input
-input:
-  | (1 + #) = 1111
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.6: syntax error: invalid character: '#'
 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose  ...
-./calc.at:1409: mv calc.y.tmp calc.y
-
 stderr:
 stderr:
-1.6: syntax error: invalid character: '#'
-1.6: syntax error: invalid character: '#'
-./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1400: cat stderr
-input:
-./calc.at:1401: cat stderr
-  | (# + 1) = 1111
-./calc.at:1400:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1401:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1400: cat stderr
+1.14: memory exhausted
 stdout:
 stderr:
-1.11-17: error: null divisor
-./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./calc.at:1403: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
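This "$PERL" -ne check, run once each test's parser has been generated, scans the freshly produced calc.c and calc.h for cosmetic problems in Bison's output: a blank first or last line, trailing whitespace, or tab characters. Any offending line is printed as "file:line: {content}", the braces making trailing spaces visible; an empty report means the generated sources are clean. Shown only for illustration, the same checks applied to an in-memory sample (the sample lines are hypothetical, not taken from calc.c):

  use strict;
  use warnings;

  my @sample = ("int x;\t/* tab */", "clean line", "trailing space ");
  my $n = 0;
  for my $line (@sample) {
    $n++;
    my $last = ($n == @sample);
    print "sample:$n: {$line}\n"
      if (($last || $n == 1) && $line =~ /^\s*$/   # blank first/last line
          || $line =~ /\s$/                        # trailing whitespace
          || $line =~ /\t/);                       # tabs
  }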
 ./calc.at:1402: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -90878,14 +90909,9 @@
         || /\t/
         )' calc.c calc.h
 
-stderr:
+./calc.at:1401: cat stderr
 input:
-1.11-17: error: null divisor
-  | (1 + # + 1) = 1111
-./calc.at:1400:  $PREPARSER ./calc  input
 input:
-stderr:
-./calc.at:1401: cat stderr
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -90900,55 +90926,51 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1402:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1403:  $PREPARSER ./calc  input
 stderr:
-511. calc.at:1401: stderr:
- ok
 ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.6: syntax error: invalid character: '#'
 stderr:
-./calc.at:1409: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+stderr:
 input:
-./calc.at:1400: cat stderr
+input:
+  | (#) + (#) = 2222
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1401:  $PREPARSER ./calc  input
   | 1 2
 ./calc.at:1402:  $PREPARSER ./calc  input
-
 stderr:
+stderr:
+stderr:
+input:
 1.3: syntax error, unexpected number
 ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1400:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 stderr:
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.11-17: error: null divisor
-./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
 1.3: syntax error, unexpected number
+./calc.at:1403:  $PREPARSER ./calc  input
 stderr:
-1.11-17: error: null divisor
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1402: cat stderr
-./calc.at:1400: cat stderr
-input:
-  | 1//2
-./calc.at:1402:  $PREPARSER ./calc  input
-510. calc.at:1400:  ok
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+1.3: syntax error, unexpected number
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-
 ./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -90959,20 +90981,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1402: cat stderr
+1.3: syntax error, unexpected number
+./calc.at:1401: cat stderr
 input:
-  | error
-./calc.at:1402:  $PREPARSER ./calc  input
-stderr:
-1.1: syntax error, unexpected invalid token
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose  ...
-./calc.at:1411: mv calc.y.tmp calc.y
-
-1.1: syntax error, unexpected invalid token
-./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1402: "$PERL" -pi -e 'use strict;
+./calc.at:1402: cat stderr
+./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -90982,85 +90995,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1402: cat stderr
+  | (1 + #) = 1111
+./calc.at:1401:  $PREPARSER ./calc  input
+stderr:
 input:
-  | 1 = 2 = 3
+1.6: syntax error: invalid character: '#'
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1403: cat stderr
+  | 1//2
 ./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
-1.7: syntax error, unexpected '='
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.7: syntax error, unexpected '='
-519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1413: mv calc.y.tmp calc.y
-
-./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1402: cat stderr
 input:
-  | 
-  | +1
-./calc.at:1402:  $PREPARSER ./calc  input
-stderr:
-2.1: syntax error, unexpected '+'
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+1.6: syntax error: invalid character: '#'
 ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
+./calc.at:1403:  $PREPARSER ./calc  input
 stderr:
-2.1: syntax error, unexpected '+'
-./calc.at:1411: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1402: cat stderr
-./calc.at:1402:  $PREPARSER ./calc  /dev/null
 stderr:
-1.1: syntax error, unexpected end of file
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1401: cat stderr
 stderr:
-1.1: syntax error, unexpected end of file
-./calc.at:1413: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1402: cat stderr
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1402:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (# + 1) = 1111
+./calc.at:1401:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
+1.2: syntax error: invalid character: '#'
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -91071,18 +91036,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1402: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1402:  $PREPARSER ./calc  input
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1402: "$PERL" -pi -e 'use strict;
+./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -91092,65 +91046,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1402: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1402:  $PREPARSER ./calc  input
-stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1403: cat stderr
 stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1402: cat stderr
+1.2: syntax error: invalid character: '#'
+./calc.at:1401: cat stderr
 input:
-  | (* *) + (*) + (*)
+input:
+  | error
 ./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1: syntax error, unexpected invalid token
 ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1402: cat stderr
+  | error
+./calc.at:1403:  $PREPARSER ./calc  input
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1402:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
+./calc.at:1401:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error, unexpected invalid token
 stderr:
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1402:  $PREPARSER ./calc  input
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error, unexpected invalid token
 stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
+1.1: syntax error, unexpected invalid token
 ./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -91161,16 +91085,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1402: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1402:  $PREPARSER ./calc  input
-stderr:
-1.14: memory exhausted
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.14: memory exhausted
-./calc.at:1402: "$PERL" -pi -e 'use strict;
+./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -91181,54 +91096,32 @@
   }eg
 ' expout || exit 77
 ./calc.at:1402: cat stderr
+./calc.at:1401: cat stderr
+./calc.at:1403: cat stderr
 input:
-  | (#) + (#) = 2222
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1401:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
 ./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1402: cat stderr
 input:
-  | (1 + #) = 1111
-./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
+1.11-17: error: null divisor
+./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1403:  $PREPARSER ./calc  input
+1.7: syntax error, unexpected '='
 ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1402: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.7: syntax error, unexpected '='
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
 stderr:
-1.2: syntax error: invalid character: '#'
+1.7: syntax error, unexpected '='
+stderr:
+1.7: syntax error, unexpected '='
 ./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -91240,15 +91133,8 @@
   }eg
 ' expout || exit 77
 ./calc.at:1402: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1402:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1402: "$PERL" -pi -e 'use strict;
+./calc.at:1401: cat stderr
+./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -91258,36 +91144,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1402: cat stderr
+511. calc.at:1401:  ok
 input:
-  | (1 + 1) / (1 - 1)
+  | 
+  | +1
 ./calc.at:1402:  $PREPARSER ./calc  input
+./calc.at:1403: cat stderr
 stderr:
-1.11-17: error: null divisor
-./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11-17: error: null divisor
-./calc.at:1402: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1402: cat stderr
-512. calc.at:1402:  ok
-
-520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1414: mv calc.y.tmp calc.y
-
-./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
 stderr:
-./calc.at:1414: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+2.1: syntax error, unexpected '+'
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
-./calc.at:1403: "$PERL" -ne '
+input:
+  | 
+  | +1
+./calc.at:1403:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1405: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -91298,7 +91171,12 @@
         || /\t/
         )' calc.c calc.h
 
+2.1: syntax error, unexpected '+'
+
+stderr:
 input:
+2.1: syntax error, unexpected '+'
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -91312,57 +91190,10 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1403:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-  | 1 2
-./calc.at:1403:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error, unexpected number
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error, unexpected number
-./calc.at:1403: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1403: cat stderr
-input:
-  | 1//2
-./calc.at:1403:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1403: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1403: cat stderr
-input:
-  | error
-./calc.at:1403:  $PREPARSER ./calc  input
+./calc.at:1405:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error, unexpected invalid token
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.1: syntax error, unexpected invalid token
-./calc.at:1403: "$PERL" -pi -e 'use strict;
+./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -91372,46 +91203,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1403: cat stderr
-input:
-stderr:
-  | 1 = 2 = 3
-./calc.at:1403:  $PREPARSER ./calc  input
-stdout:
-./calc.at:1405: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-stderr:
-1.7: syntax error, unexpected '='
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1405:  $PREPARSER ./calc  input
-stderr:
-1.7: syntax error, unexpected '='
 ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error, unexpected '+'
 stderr:
+./calc.at:1402: cat stderr
 ./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -91425,23 +91220,31 @@
 input:
   | 1 2
 ./calc.at:1405:  $PREPARSER ./calc  input
+./calc.at:1402:  $PREPARSER ./calc  /dev/null
 ./calc.at:1403: cat stderr
 stderr:
 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 
-  | +1
-./calc.at:1403:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1403:  $PREPARSER ./calc  /dev/null
+stderr:
+1.1: syntax error, unexpected end of file
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-2.1: syntax error, unexpected '+'
 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+1.1: syntax error, unexpected end of input
 ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-2.1: syntax error, unexpected '+'
+519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1413: mv calc.y.tmp calc.y
+
+stderr:
+1.1: syntax error, unexpected end of file
+1.1: syntax error, unexpected end of input
 ./calc.at:1405: cat stderr
-./calc.at:1403: "$PERL" -pi -e 'use strict;
+./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+input:
+./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -91451,21 +91254,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1403: cat stderr
-input:
   | 1//2
 ./calc.at:1405:  $PREPARSER ./calc  input
-./calc.at:1403:  $PREPARSER ./calc  /dev/null
-stderr:
-stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.1: syntax error, unexpected end of input
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-1.1: syntax error, unexpected end of input
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1402: cat stderr
 ./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -91476,15 +91268,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1405: cat stderr
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1403: cat stderr
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 input:
-  | error
-./calc.at:1405:  $PREPARSER ./calc  input
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1405: cat stderr
 input:
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1403:  $PREPARSER ./calc  input
 stderr:
@@ -91495,44 +91295,25 @@
 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 1.1-46: error: 4444 != 1
 ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-stderr:
 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 1.1-46: error: 4444 != 1
-./calc.at:1405: cat stderr
-./calc.at:1403: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 input:
-./calc.at:1403: cat stderr
-  | 1 = 2 = 3
+stderr:
+  | error
 ./calc.at:1405:  $PREPARSER ./calc  input
 stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | (!!) + (1 2) = 1
-./calc.at:1403:  $PREPARSER ./calc  input
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1413: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
 stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1405: cat stderr
 ./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -91543,32 +91324,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 
-  | +1
-./calc.at:1405:  $PREPARSER ./calc  input
-./calc.at:1403: cat stderr
-stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1403:  $PREPARSER ./calc  input
-stderr:
-stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1405: cat stderr
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1405:  $PREPARSER ./calc  /dev/null
-stderr:
-./calc.at:1403: "$PERL" -pi -e 'use strict;
+./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -91578,36 +91334,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1402: cat stderr
+./calc.at:1405: cat stderr
 ./calc.at:1403: cat stderr
 stderr:
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+stdout:
 input:
-./calc.at:1405: cat stderr
-  | (* *) + (*) + (*)
+  | (!!) + (1 2) = 1
 ./calc.at:1403:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-stderr:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1405:  $PREPARSER ./calc  input
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-stderr:
-stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
 ./calc.at:1407: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -91619,23 +91355,26 @@
         || /\t/
         )' calc.c
 
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
-./calc.at:1403: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1405:  $PREPARSER ./calc  input
+stderr:
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -91649,10 +91388,13 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
+stderr:
 ./calc.at:1407:  $PREPARSER ./calc  input
-./calc.at:1403: cat stderr
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
 stderr:
-./calc.at:1405: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -92490,6 +92232,32 @@
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1405: cat stderr
+./calc.at:1403: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1402: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | 
+  | +1
+./calc.at:1405:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -93327,18 +93095,20 @@
 Entering state 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
+./calc.at:1403: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1403:  $PREPARSER ./calc  input
-input:
-input:
-  | (!!) + (1 2) = 1
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1402: cat stderr
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 2
 ./calc.at:1407:  $PREPARSER ./calc  input
-./calc.at:1405:  $PREPARSER ./calc  input
-stderr:
 stderr:
 stderr:
+input:
+input:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+  | (- *) + (1 2) = 1
+./calc.at:1403:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -93354,16 +93124,21 @@
 syntax error
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.1: 2)
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
+  | (- *) + (1 2) = 1
+./calc.at:1402:  $PREPARSER ./calc  input
 ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
 stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1405: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -93379,62 +93154,16 @@
 syntax error
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.1: 2)
-./calc.at:1405: cat stderr
-input:
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1 + 2 * 3 + !- ++
-./calc.at:1403:  $PREPARSER ./calc  input
 stderr:
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1405:  $PREPARSER ./calc  input
-./calc.at:1407: cat stderr
 stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
 1.1-17: error: 2222 != 1
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
 1.1-17: error: 2222 != 1
-input:
-  | 1//2
-./calc.at:1407:  $PREPARSER ./calc  input
-./calc.at:1405: cat stderr
+./calc.at:1405:  $PREPARSER ./calc  /dev/null
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.1: )
-Shifting token '/' (1.1: )
-Entering state 23
-Reading a token
-Next token is token '/' (1.1: )
-syntax error
-Error: popping token '/' (1.1: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -93445,42 +93174,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-./calc.at:1403: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.1: )
-Shifting token '/' (1.1: )
-Entering state 23
-Reading a token
-Next token is token '/' (1.1: )
-syntax error
-Error: popping token '/' (1.1: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.1: )
-  | (* *) + (*) + (*)
-./calc.at:1405:  $PREPARSER ./calc  input
-input:
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !* ++
-./calc.at:1403:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.14: memory exhausted
 ./calc.at:1407: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -93491,15 +93186,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-stderr:
-1.14: memory exhausted
-./calc.at:1405: cat stderr
-./calc.at:1407: cat stderr
-./calc.at:1403: "$PERL" -pi -e 'use strict;
+./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -93509,42 +93196,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-input:
+./calc.at:1402: cat stderr
 ./calc.at:1403: cat stderr
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1405:  $PREPARSER ./calc  input
-  | error
-./calc.at:1407:  $PREPARSER ./calc  input
 stderr:
+input:
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+  | (* *) + (*) + (*)
+./calc.at:1402:  $PREPARSER ./calc  input
+input:
+  | (* *) + (*) + (*)
 stderr:
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-syntax error
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1403:  $PREPARSER ./calc  input
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1405: cat stderr
+./calc.at:1407: cat stderr
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-syntax error
-Cleanup: discarding lookahead token "invalid token" (1.1: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 input:
-  | (#) + (#) = 2222
-./calc.at:1403:  $PREPARSER ./calc  input
 input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !- ++
 ./calc.at:1405:  $PREPARSER ./calc  input
-./calc.at:1407: "$PERL" -pi -e 'use strict;
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+stderr:
+./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -93554,14 +93241,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
 ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1407: cat stderr
-stderr:
-./calc.at:1403: "$PERL" -pi -e 'use strict;
+  | 1//2
+./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -93571,11 +93258,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 = 2 = 3
-./calc.at:1403: cat stderr
 ./calc.at:1407:  $PREPARSER ./calc  input
-./calc.at:1405: cat stderr
+stderr:
+./calc.at:1402: cat stderr
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1402:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -93588,35 +93278,27 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 28
+Next token is token '/' (1.1: )
+Shifting token '/' (1.1: )
+Entering state 23
 Reading a token
-Next token is token '=' (1.1: )
+Next token is token '/' (1.1: )
 syntax error
-Error: popping nterm exp (1.1: 2)
-Error: popping token '=' (1.1: )
+Error: popping token '/' (1.1: )
 Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.1: )
-input:
+Cleanup: discarding lookahead token '/' (1.1: )
+stderr:
 ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + #) = 1111
-./calc.at:1403:  $PREPARSER ./calc  input
 input:
+./calc.at:1403: cat stderr
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+  | 1 + 2 * 3 + !- ++
+./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1405:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -93629,29 +93311,23 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 28
+Next token is token '/' (1.1: )
+Shifting token '/' (1.1: )
+Entering state 23
 Reading a token
-Next token is token '=' (1.1: )
+Next token is token '/' (1.1: )
 syntax error
-Error: popping nterm exp (1.1: 2)
-Error: popping token '=' (1.1: )
+Error: popping token '/' (1.1: )
 Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.1: )
-1.14: memory exhausted
+Cleanup: discarding lookahead token '/' (1.1: )
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1405: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1403:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1403: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1407: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -93661,9 +93337,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
-1.14: memory exhausted
-./calc.at:1407: "$PERL" -pi -e 'use strict;
+  | (!!) + (1 2) = 1
+./calc.at:1405:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -93673,696 +93353,910 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1403: cat stderr
-./calc.at:1405: cat stderr
-./calc.at:1407: cat stderr
+stdout:
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (# + 1) = 1111
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1403:  $PREPARSER ./calc  input
 stderr:
-input:
-1.2: syntax error: invalid character: '#'
+./calc.at:1408: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+stderr:
 ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1402: cat stderr
 input:
+./calc.at:1405: cat stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
   | 
-  | +1
-./calc.at:1407:  $PREPARSER ./calc  input
-  | (#) + (#) = 2222
-./calc.at:1405:  $PREPARSER ./calc  input
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1408:  $PREPARSER ./calc  input
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1402:  $PREPARSER ./calc  input
 stderr:
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 76):
-   $1 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Reading a token
-Next token is token '+' (1.1: )
-syntax error
-Error: popping nterm input (1.1: )
-Cleanup: discarding lookahead token '+' (1.1: )
+./calc.at:1407: cat stderr
+1.14: memory exhausted
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | error
 stderr:
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
+./calc.at:1407:  $PREPARSER ./calc  input
 stderr:
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 76):
-   $1 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '+' (1.1: )
-syntax error
-Error: popping nterm input (1.1: )
-Cleanup: discarding lookahead token '+' (1.1: )
-./calc.at:1403: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1405: cat stderr
-./calc.at:1403: cat stderr
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | (1 + #) = 1111
-./calc.at:1405:  $PREPARSER ./calc  input
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1403:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-1.6: syntax error: invalid character: '#'
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1407: cat stderr
-./calc.at:1407:  $PREPARSER ./calc  /dev/null
-stderr:
-./calc.at:1405: cat stderr
-1.6: syntax error: invalid character: '#'
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Now at end of input.
-syntax error
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-Starting parse
-Entering state 0
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Now at end of input.
-syntax error
-Cleanup: discarding lookahead token "end of input" (1.1: )
-  | (# + 1) = 1111
-./calc.at:1405:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1403: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.2: syntax error: invalid character: '#'
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1403: cat stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1403:  $PREPARSER ./calc  input
-stderr:
-1.11-17: error: null divisor
-./calc.at:1405: cat stderr
-./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1407: cat stderr
-1.11-17: error: null divisor
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1405:  $PREPARSER ./calc  input
-input:
-stderr:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1407:  $PREPARSER ./calc  input
-./calc.at:1403: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.6: syntax error: invalid character: '#'
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.6: syntax error: invalid character: '#'
-Starting parse
-Entering state 0
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
 Reading a token
-Next token is token ')' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token "number" (1.13: 7)
+Shifting token "number" (1.13: 7)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
+Entering state 28
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '\n' (1.14-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
+Entering state 8
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (2.1: 1)
+Shifting token "number" (2.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
+Entering state 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (2.5: 2)
+Shifting token "number" (2.5: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
 Entering state 30
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 2)
-Entering state 12
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 22
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Reading a token
+Next token is token "number" (2.10: 3)
+Shifting token "number" (2.10: 3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Reading a token
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 31
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
 Entering state 30
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
+Entering state 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 3)
-Entering state 12
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
 Reading a token
-Next token is token ')' (1.1: )
-syntax error
-Error: popping token '+' (1.1: )
-Error: popping nterm exp (1.1: 3)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token "number" (2.15: 5)
+Shifting token "number" (2.15: 5)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 28
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
 Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
 Reading a token
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (4.2: 1)
+Shifting token "number" (4.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
 Reading a token
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 24
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token "number" (4.4: 2)
+Shifting token "number" (4.4: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 33
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 3333)
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
 Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
+Entering state 19
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (4.9: 1)
+Shifting token "number" (4.9: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
 Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 28
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
+Entering state 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
+Entering state 6
 Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
+Entering state 4
+Reading a token
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Reading a token
+Next token is token "number" (5.3: 1)
+Shifting token "number" (5.3: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 31
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
 Reading a token
-Next token is token '*' (1.1: )
-Reducing stack 0 by rule 9 (line 100):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 2)
--> $$ = nterm exp (1.1: 2)
+Next token is token ')' (5.4: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
 Entering state 12
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
+Entering state 8
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Error: popping token '*' (1.1: )
-Error: popping nterm exp (1.1: 2)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 24
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token "number" (5.6: 2)
+Shifting token "number" (5.6: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 33
 Reading a token
-Next token is token '=' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 3333)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 4444)
+Next token is token '=' (5.8: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
 Entering state 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (5.10: 1)
+Shifting token "number" (5.10: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 4444)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1)
-error: 4444 != 1
--> $$ = nterm exp (1.1: 4444)
+Next token is token '\n' (5.11-6.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 4444)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1403: cat stderr
-513. calc.at:1403: stderr:
-./calc.at:1405: cat stderr
- ok
-Starting parse
-Entering state 0
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
 Reading a token
-Next token is token ')' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Reading a token
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Reading a token
+Next token is token "number" (7.4: 1)
+Shifting token "number" (7.4: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Reading a token
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
 Entering state 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token "number" (7.9: 1)
+Shifting token "number" (7.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 28
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (9.1: 1)
+Shifting token "number" (9.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
+Entering state 8
+Reading a token
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (9.5: 2)
+Shifting token "number" (9.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 29
+Reading a token
+Next token is token '-' (9.7: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 20
+Reading a token
+Next token is token "number" (9.9: 3)
+Shifting token "number" (9.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 29
+Reading a token
+Next token is token '=' (9.11: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 19
+Reading a token
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
+Reading a token
+Next token is token "number" (9.14: 4)
+Shifting token "number" (9.14: 4)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
+Reading a token
+Next token is token '\n' (9.15-10.0: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 28
+Next token is token '\n' (9.15-10.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
+Entering state 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (10.1: 1)
+Shifting token "number" (10.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
+Reading a token
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 20
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (10.6: 2)
+Shifting token "number" (10.6: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
 Entering state 12
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
+Entering state 20
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (10.10: 3)
+Shifting token "number" (10.10: 3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 30
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
+Entering state 29
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 2)
+Next token is token ')' (10.11: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
 Entering state 12
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token ')' (10.11: )
+1.14: memory exhausted
+Shifting token ')' (10.11: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 29
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '=' (10.13: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
+Entering state 8
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
+Entering state 19
+Reading a token
+Next token is token "number" (10.15: 2)
+Shifting token "number" (10.15: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 30
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
+Entering state 28
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 3)
-Entering state 12
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '\n' (10.16-11.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
+Entering state 8
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
+Entering state 6
 Reading a token
-Next token is token ')' (1.1: )
-syntax error
-Error: popping token '+' (1.1: )
-Error: popping nterm exp (1.1: 3)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+Next token is token "number" (12.1: 2)
+Shifting token "number" (12.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
 Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 24
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (12.3: 2)
+Shifting token "number" (12.3: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 33
 Reading a token
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 24
 Reading a token
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (12.5: 3)
+Shifting token "number" (12.5: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 33
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token '=' (12.7: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 33
+Next token is token '=' (12.7: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
+Entering state 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 3333)
+Next token is token "number" (12.9-11: 256)
+Shifting token "number" (12.9-11: 256)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 28
+Reading a token
+Next token is token '\n' (12.12-13.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
 Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (13.2: 2)
+Shifting token "number" (13.2: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
 Entering state 12
 Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 24
 Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
+Next token is token "number" (13.4: 2)
+Shifting token "number" (13.4: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 31
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 33
 Reading a token
-Next token is token '*' (1.1: )
-Reducing stack 0 by rule 9 (line 100):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 2)
--> $$ = nterm exp (1.1: 2)
+Next token is token ')' (13.5: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
 Entering state 12
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
+Entering state 8
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Error: popping token '*' (1.1: )
-Error: popping nterm exp (1.1: 2)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 24
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token "number" (13.7: 3)
+Shifting token "number" (13.7: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 33
 Reading a token
-Next token is token '=' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 3333)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 4444)
+Next token is token '=' (13.9: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
 Entering state 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (13.11-12: 64)
+Shifting token "number" (13.11-12: 64)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 4444)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1)
-error: 4444 != 1
--> $$ = nterm exp (1.1: 4444)
+Next token is token '\n' (13.13-14.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 4444)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (1.1: )
+Shifting token "end of input" (14.1: )
 Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
+Cleanup: popping token "end of input" (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+syntax error
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (1 + 1) / (1 - 1)
+  | (- *) + (1 2) = 1
 ./calc.at:1405:  $PREPARSER ./calc  input
-stderr:
-1.11-17: error: null divisor
-
-./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1407: "$PERL" -pi -e 'use strict;
+./calc.at:1402: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -94373,894 +94267,953 @@
   }eg
 ' expout || exit 77
 stderr:
-1.11-17: error: null divisor
-./calc.at:1405: cat stderr
-514. calc.at:1405:  ok
-./calc.at:1407: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1407:  $PREPARSER ./calc  input
+./calc.at:1403: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 16
-Reducing stack 0 by rule 16 (line 121):
-   $1 = token '!' (1.1: )
-   $2 = token '!' (1.1: )
-Shifting token error (1.1: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
+Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 12
+Entering state 8
 Reading a token
-Next token is token "number" (1.1: 2)
-syntax error
-Error: popping nterm exp (1.1: 1)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token "number" (1.1: 2)
-Error: discarding token "number" (1.1: 2)
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 30
 Reading a token
-Next token is token '=' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (1.13: 7)
+Shifting token "number" (1.13: 7)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1)
-error: 2222 != 1
--> $$ = nterm exp (1.1: 2222)
+Next token is token '\n' (1.14-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
+-> $$ = nterm line (1.1-2.0: )
 Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 16
-Reducing stack 0 by rule 16 (line 121):
-   $1 = token '!' (1.1: )
-   $2 = token '!' (1.1: )
-Shifting token error (1.1: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token "number" (2.1: 1)
+Shifting token "number" (2.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
 Entering state 21
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (2.5: 2)
+Shifting token "number" (2.5: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
+Entering state 30
 Reading a token
-Next token is token "number" (1.1: 2)
-syntax error
-Error: popping nterm exp (1.1: 1)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token "number" (1.1: 2)
-Error: discarding token "number" (1.1: 2)
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 22
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
 Reading a token
-Next token is token '=' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+Next token is token "number" (2.10: 3)
+Shifting token "number" (2.10: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Reading a token
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 31
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
+Entering state 30
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
 Entering state 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Reading a token
+Next token is token "number" (2.15: 5)
+Shifting token "number" (2.15: 5)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 28
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1)
-error: 2222 != 1
--> $$ = nterm exp (1.1: 2222)
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 28
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-521. calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1416: mv calc.y.tmp calc.y
-
-./calc.at:1407: cat stderr
-./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1407:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
 Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
 Entering state 2
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 120):
-   $1 = token '-' (1.1: )
-   $2 = token error (1.1: )
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (4.2: 1)
+Shifting token "number" (4.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 24
+Reading a token
+Next token is token "number" (4.4: 2)
+Shifting token "number" (4.4: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 33
+Reading a token
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
 Entering state 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token "number" (4.9: 1)
+Shifting token "number" (4.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Reading a token
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 28
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
+Entering state 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
+Entering state 6
+Reading a token
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Reading a token
+Next token is token "number" (5.3: 1)
+Shifting token "number" (5.3: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
+Reading a token
+Next token is token ')' (5.4: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
 Entering state 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
+Entering state 8
 Reading a token
-Next token is token "number" (1.1: 2)
-syntax error
-Error: popping nterm exp (1.1: 1)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token "number" (1.1: 2)
-Error: discarding token "number" (1.1: 2)
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 24
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token "number" (5.6: 2)
+Shifting token "number" (5.6: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 33
 Reading a token
-Next token is token '=' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+Next token is token '=' (5.8: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
 Entering state 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (5.10: 1)
+Shifting token "number" (5.10: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1)
-error: 2222 != 1
--> $$ = nterm exp (1.1: 2222)
+Next token is token '\n' (5.11-6.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
 Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
 Entering state 2
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 120):
-   $1 = token '-' (1.1: )
-   $2 = token error (1.1: )
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
+Next token is token "number" (7.4: 1)
+Shifting token "number" (7.4: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
+Entering state 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 19
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (7.9: 1)
+Shifting token "number" (7.9: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
 Reading a token
-Next token is token "number" (1.1: 2)
-syntax error
-Error: popping nterm exp (1.1: 1)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token "number" (1.1: 2)
-Error: discarding token "number" (1.1: 2)
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 28
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
 Reading a token
-Next token is token '=' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+Next token is token "number" (9.1: 1)
+Shifting token "number" (9.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
 Entering state 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (9.5: 2)
+Shifting token "number" (9.5: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 28
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 29
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1)
-error: 2222 != 1
--> $$ = nterm exp (1.1: 2222)
+Next token is token '-' (9.7: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 20
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-522. calc.at:1426: testing Calculator lalr1.cc %header  ...
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1426: mv calc.y.tmp calc.y
-
-./calc.at:1407: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1407:  $PREPARSER ./calc  input
-./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-stderr:
-Starting parse
-Entering state 0
+Next token is token "number" (9.9: 3)
+Shifting token "number" (9.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 29
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '=' (9.11: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 19
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
 Reading a token
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (9.14: 4)
+Shifting token "number" (9.14: 4)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token '\n' (9.15-10.0: )
+Reducing stack 0 by rule 11 (line 123):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 28
+Next token is token '\n' (9.15-10.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
 Entering state 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token "number" (10.1: 1)
+Shifting token "number" (10.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 20
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
 Entering state 4
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (10.6: 2)
+Shifting token "number" (10.6: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
+Entering state 12
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
+Entering state 20
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token "number" (10.10: 3)
+Shifting token "number" (10.10: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
+Entering state 29
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token ')' (10.11: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
+Entering state 12
+Next token is token ')' (10.11: )
+Shifting token ')' (10.11: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 29
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '=' (10.13: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
+Entering state 8
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
+Entering state 19
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token "number" (10.15: 2)
+Shifting token "number" (10.15: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
+Entering state 28
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 3333)
+Next token is token '\n' (10.16-11.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 3333)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token "number" (12.1: 2)
+Shifting token "number" (12.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
+Entering state 8
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 24
 Reading a token
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (12.3: 2)
+Shifting token "number" (12.3: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 33
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 24
+Reading a token
+Next token is token "number" (12.5: 3)
+Shifting token "number" (12.5: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 33
+Reading a token
+Next token is token '=' (12.7: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 33
+Next token is token '=' (12.7: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
 Entering state 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token "number" (12.9-11: 256)
+Shifting token "number" (12.9-11: 256)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 28
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '\n' (12.12-13.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
+Entering state 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
+Reading a token
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
 Entering state 4
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (13.2: 2)
+Shifting token "number" (13.2: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
+Entering state 12
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 24
 Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+Next token is token "number" (13.4: 2)
+Shifting token "number" (13.4: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 33
+Reading a token
+Next token is token ')' (13.5: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
+Entering state 12
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
 Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 24
 Reading a token
-Next token is token '*' (1.1: )
-syntax error
-Shifting token error (1.1: )
-Entering state 11
-Next token is token '*' (1.1: )
-Error: discarding token '*' (1.1: )
+Next token is token "number" (13.7: 3)
+Shifting token "number" (13.7: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 33
 Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 119):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1: )
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 1111)
-Entering state 30
+Next token is token '=' (13.9: )
+Reducing stack 0 by rule 12 (line 124):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
+Entering state 8
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
+Entering state 19
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 3333)
+Next token is token "number" (13.11-12: 64)
+Shifting token "number" (13.11-12: 64)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
+Entering state 28
+Reading a token
+Next token is token '\n' (13.13-14.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 3333)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (1.1: )
+Shifting token "end of input" (14.1: )
 Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1407: cat stderr
-./calc.at:1416: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+Cleanup: popping token "end of input" (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
+stderr:
+./calc.at:1403: cat stderr
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1407:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+syntax error
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+./calc.at:1402: cat stderr
+stderr:
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+  | 1 2
+./calc.at:1408:  $PREPARSER ./calc  input
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
+Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 122):
-   $1 = token '!' (1.1: )
-   $2 = token '+' (1.1: )
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !* ++
+./calc.at:1403:  $PREPARSER ./calc  input
+input:
+  | (#) + (#) = 2222
+./calc.at:1402:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
+stderr:
+1.14: memory exhausted
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1405: cat stderr
 Starting parse
 Entering state 0
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
+Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 122):
-   $1 = token '!' (1.1: )
-   $2 = token '+' (1.1: )
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | 1 + 2 * 3 + !- ++
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1407: cat stderr
+stderr:
+input:
+  | 1 = 2 = 3
 ./calc.at:1407:  $PREPARSER ./calc  input
+1.14: memory exhausted
+  | (* *) + (*) + (*)
+./calc.at:1405:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1402: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -95273,9 +95226,9 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
 Reading a token
 Next token is token "number" (1.1: 2)
 Shifting token "number" (1.1: 2)
@@ -95283,52 +95236,41 @@
 Reducing stack 0 by rule 5 (line 81):
    $1 = token "number" (1.1: 2)
 -> $$ = nterm exp (1.1: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
+Entering state 28
 Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 123):
-   $1 = token '!' (1.1: )
-   $2 = token '-' (1.1: )
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
+Next token is token '=' (1.1: )
+syntax error
+Error: popping nterm exp (1.1: 2)
+Error: popping token '=' (1.1: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.1: )
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1403: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1408: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -95340,9 +95282,9 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
 Reading a token
 Next token is token "number" (1.1: 2)
 Shifting token "number" (1.1: 2)
@@ -95350,53 +95292,38 @@
 Reducing stack 0 by rule 5 (line 81):
    $1 = token "number" (1.1: 2)
 -> $$ = nterm exp (1.1: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
+Entering state 28
 Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 123):
-   $1 = token '!' (1.1: )
-   $2 = token '-' (1.1: )
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
+Next token is token '=' (1.1: )
+syntax error
+Error: popping nterm exp (1.1: 2)
+Error: popping token '=' (1.1: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.1: )
+./calc.at:1402: cat stderr
+stderr:
+./calc.at:1403: cat stderr
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+input:
+input:
+  | 1//2
+./calc.at:1408:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1403:  $PREPARSER ./calc  input
+input:
+stderr:
 stderr:
 stdout:
-./calc.at:1408: "$PERL" -ne '
+  | (1 + #) = 1111
+./calc.at:1405: cat stderr
+./calc.at:1402:  $PREPARSER ./calc  input
+stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1409: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -95407,7 +95334,6 @@
         || /\t/
         )' calc.c calc.h
 
-./calc.at:1426: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
 ./calc.at:1407: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -95418,7 +95344,69 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+1.6: syntax error: invalid character: '#'
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1405:  $PREPARSER ./calc  input
 input:
+stderr:
+stderr:
+./calc.at:1407: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1402: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -95432,80 +95420,28 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1408:  $PREPARSER ./calc  input
-./calc.at:1407: cat stderr
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1409:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1402: cat stderr
 input:
-  | 1 + 2 * 3 + !* ++
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-./calc.at:1407:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 124):
-   $1 = token '!' (1.1: )
-   $2 = token '*' (1.1: )
-memory exhausted
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 
+  | +1
+./calc.at:1407:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -96342,75 +96278,24 @@
 Entering state 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.1: 2)
-Shifting token "number" (1.1: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2)
--> $$ = nterm exp (1.1: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.1: 3)
-Shifting token "number" (1.1: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 3)
--> $$ = nterm exp (1.1: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 9 (line 100):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '*' (1.1: )
-   $3 = nterm exp (1.1: 3)
--> $$ = nterm exp (1.1: 6)
-Entering state 30
-Next token is token '+' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 6)
--> $$ = nterm exp (1.1: 7)
-Entering state 8
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.1: )
-Shifting token '!' (1.1: )
-Entering state 5
-Reading a token
-Next token is token '*' (1.1: )
-Shifting token '*' (1.1: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 124):
-   $1 = token '!' (1.1: )
-   $2 = token '*' (1.1: )
-memory exhausted
-Cleanup: popping token '+' (1.1: )
-Cleanup: popping nterm exp (1.1: 7)
+  | (# + 1) = 1111
+./calc.at:1402:  $PREPARSER ./calc  input
+./calc.at:1408: cat stderr
+input:
+./calc.at:1403: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -97247,10 +97132,69 @@
 Entering state 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 76):
+   $1 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Next token is token '+' (1.1: )
+syntax error
+Error: popping nterm input (1.1: )
+Cleanup: discarding lookahead token '+' (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
 input:
+1.2: syntax error: invalid character: '#'
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+  | 1 + 2 * 3 + !- ++
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 76):
+   $1 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Next token is token '+' (1.1: )
+syntax error
+Error: popping nterm input (1.1: )
+Cleanup: discarding lookahead token '+' (1.1: )
+./calc.at:1405:  $PREPARSER ./calc  input
   | 1 2
+./calc.at:1409:  $PREPARSER ./calc  input
+  | error
 ./calc.at:1408:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1403: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -97266,7 +97210,30 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+input:
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (1 + #) = 1111
+stderr:
+./calc.at:1403:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
 stderr:
 Starting parse
 Entering state 0
@@ -97283,7 +97250,21 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
-./calc.at:1407: "$PERL" -pi -e 'use strict;
+1.6: syntax error: invalid character: '#'
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1402: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1409: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -97303,14 +97284,78 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1408: cat stderr
 ./calc.at:1407: cat stderr
+./calc.at:1402: cat stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1407:  $PREPARSER ./calc  /dev/null
+./calc.at:1405: cat stderr
+stderr:
 input:
-  | 1//2
-./calc.at:1408:  $PREPARSER ./calc  input
+./calc.at:1403: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error
+Cleanup: discarding lookahead token "end of input" (1.1: )
+  | (1 + # + 1) = 1111
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1402:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1408: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1409: cat stderr
 stderr:
 input:
-  | (#) + (#) = 2222
+  | 1 + 2 * 3 + !* ++
+1.6: syntax error: invalid character: '#'
+./calc.at:1405:  $PREPARSER ./calc  input
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1403: cat stderr
+1.14: memory exhausted
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
+input:
+input:
+  | 1 = 2 = 3
+./calc.at:1408:  $PREPARSER ./calc  input
+  | 1//2
+./calc.at:1409:  $PREPARSER ./calc  input
+input:
+stderr:
+  | (# + 1) = 1111
+./calc.at:1403:  $PREPARSER ./calc  input
+stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -97331,9 +97376,152 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1407:  $PREPARSER ./calc  input
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.14: memory exhausted
+./calc.at:1407: cat stderr
+./calc.at:1402: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+1.2: syntax error: invalid character: '#'
 ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1405: cat stderr
+stderr:
+input:
+./calc.at:1402: cat stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1407:  $PREPARSER ./calc  input
+./calc.at:1403: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+./calc.at:1403: cat stderr
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+input:
+input:
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1403:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1405:  $PREPARSER ./calc  input
+./calc.at:1409: cat stderr
+  | (1 + 1) / (1 - 1)
 stderr:
+./calc.at:1402:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -97341,14 +97529,9 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
-Shifting token error (1.1: )
-Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
-Reading a token
 Next token is token ')' (1.1: )
+syntax error
+Shifting token error (1.1: )
 Entering state 11
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
@@ -97368,14 +97551,61 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
-Shifting token error (1.1: )
-Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 2)
+Entering state 12
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 3)
+Entering state 12
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
 Next token is token ')' (1.1: )
+syntax error
+Error: popping token '+' (1.1: )
+Error: popping nterm exp (1.1: 3)
+Shifting token error (1.1: )
 Entering state 11
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
@@ -97387,86 +97617,33 @@
 -> $$ = nterm exp (1.1: 1111)
 Entering state 30
 Reading a token
-Next token is token '=' (1.1: )
+Next token is token '+' (1.1: )
 Reducing stack 0 by rule 7 (line 98):
    $1 = nterm exp (1.1: 1111)
    $2 = token '+' (1.1: )
    $3 = nterm exp (1.1: 1111)
 -> $$ = nterm exp (1.1: 2222)
 Entering state 8
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.1: 2222)
-Shifting token "number" (1.1: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2222)
--> $$ = nterm exp (1.1: 2222)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 2222)
--> $$ = nterm exp (1.1: 2222)
-Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-stderr:
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-Starting parse
-Entering state 0
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
+Next token is token '*' (1.1: )
+syntax error
 Shifting token error (1.1: )
 Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Reading a token
 Next token is token ')' (1.1: )
 Entering state 11
@@ -97478,9 +97655,16 @@
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
-Entering state 8
+Entering state 30
 Reading a token
 Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 3333)
+Entering state 8
+Next token is token '+' (1.1: )
 Shifting token '+' (1.1: )
 Entering state 21
 Reading a token
@@ -97488,12 +97672,45 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 31
+Reading a token
+Next token is token '*' (1.1: )
+Reducing stack 0 by rule 9 (line 100):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 12
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Error: popping token '*' (1.1: )
+Error: popping nterm exp (1.1: 2)
 Shifting token error (1.1: )
 Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Reading a token
 Next token is token ')' (1.1: )
 Entering state 11
@@ -97509,35 +97726,36 @@
 Reading a token
 Next token is token '=' (1.1: )
 Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 3333)
    $2 = token '+' (1.1: )
    $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 2222)
+-> $$ = nterm exp (1.1: 4444)
 Entering state 8
 Next token is token '=' (1.1: )
 Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 2222)
-Shifting token "number" (1.1: 2222)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 2222)
--> $$ = nterm exp (1.1: 2222)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 28
 Reading a token
 Next token is token '\n' (1.1: )
 Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 2222)
+   $1 = nterm exp (1.1: 4444)
    $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 2222)
--> $$ = nterm exp (1.1: 2222)
+   $3 = nterm exp (1.1: 1)
+error: 4444 != 1
+-> $$ = nterm exp (1.1: 4444)
 Entering state 8
 Next token is token '\n' (1.1: )
 Shifting token '\n' (1.1: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2222)
+   $1 = nterm exp (1.1: 4444)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -97551,42 +97769,11 @@
 Entering state 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
-./calc.at:1408: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1408: cat stderr
-./calc.at:1407: cat stderr
-input:
-  | error
-./calc.at:1408:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-input:
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + #) = 1111
-./calc.at:1407:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -97595,28 +97782,9 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
-Error: popping token '+' (1.1: )
-Error: popping nterm exp (1.1: 1)
-Shifting token error (1.1: )
-Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
-Reading a token
 Next token is token ')' (1.1: )
+syntax error
+Shifting token error (1.1: )
 Entering state 11
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
@@ -97628,58 +97796,44 @@
 -> $$ = nterm exp (1.1: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-stderr:
-Starting parse
-Entering state 0
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 30
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 2)
+Entering state 12
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
@@ -97687,23 +97841,25 @@
 Reducing stack 0 by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 12
+Entering state 30
 Reading a token
 Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 3)
+Entering state 12
+Next token is token '+' (1.1: )
 Shifting token '+' (1.1: )
 Entering state 21
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
+Next token is token ')' (1.1: )
+syntax error
 Error: popping token '+' (1.1: )
-Error: popping nterm exp (1.1: 1)
+Error: popping nterm exp (1.1: 3)
 Shifting token error (1.1: )
 Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
-Reading a token
-Next token is token ')' (1.1: )
-Entering state 11
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
 Entering state 26
@@ -97712,94 +97868,35 @@
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
-Entering state 28
+Entering state 30
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
    $1 = nterm exp (1.1: 1111)
-   $2 = token '=' (1.1: )
+   $2 = token '+' (1.1: )
    $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1408: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1408: cat stderr
-./calc.at:1407: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1408:  $PREPARSER ./calc  input
-input:
-  | (# + 1) = 1111
-stderr:
-./calc.at:1407:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
+Next token is token '*' (1.1: )
+syntax error
 Shifting token error (1.1: )
 Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Reading a token
-Next token is token '+' (1.1: )
-Error: discarding token '+' (1.1: )
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Reading a token
-Next token is token "number" (1.1: 1)
-Error: discarding token "number" (1.1: 1)
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Reading a token
 Next token is token ')' (1.1: )
 Entering state 11
@@ -97811,95 +97908,62 @@
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.1: )
-Shifting token '=' (1.1: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
-Entering state 28
+Entering state 30
 Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '=' (1.1: )
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '+' (1.1: )
    $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 3333)
 Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
-Entering state 6
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
+Reducing stack 0 by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 8
+Entering state 12
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-Starting parse
-Entering state 0
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 31
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '*' (1.1: )
+Reducing stack 0 by rule 9 (line 100):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 12
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
+Next token is token '*' (1.1: )
+syntax error
+Error: popping token '*' (1.1: )
+Error: popping nterm exp (1.1: 2)
 Shifting token error (1.1: )
 Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
-Reading a token
-Next token is token '+' (1.1: )
-Error: discarding token '+' (1.1: )
-Reading a token
-Next token is token "number" (1.1: 1)
-Error: discarding token "number" (1.1: 1)
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
 Reading a token
 Next token is token ')' (1.1: )
 Entering state 11
@@ -97911,32 +97975,40 @@
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
-Entering state 8
+Entering state 30
 Reading a token
 Next token is token '=' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 3333)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 4444)
+Entering state 8
+Next token is token '=' (1.1: )
 Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 28
 Reading a token
 Next token is token '\n' (1.1: )
 Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 4444)
    $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $3 = nterm exp (1.1: 1)
+error: 4444 != 1
+-> $$ = nterm exp (1.1: 4444)
 Entering state 8
 Next token is token '\n' (1.1: )
 Shifting token '\n' (1.1: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 4444)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -97950,48 +98022,20 @@
 Entering state 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+input:
+  | error
+./calc.at:1409:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1408: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1407: "$PERL" -pi -e 'use strict;
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1403: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -98001,13 +98045,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1408: cat stderr
 input:
+stderr:
+./calc.at:1405: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 
   | +1
 ./calc.at:1408:  $PREPARSER ./calc  input
+1.11-17: error: null divisor
+./calc.at:1403: cat stderr
 stderr:
-./calc.at:1407: cat stderr
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -98027,8 +98081,44 @@
 2.1: syntax error, unexpected '+'
 Error: popping nterm input (1.1-2.0: )
 Cleanup: discarding lookahead token '+' (2.1: )
+stderr:
 ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+  | (1 + #) = 1111
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1405:  $PREPARSER ./calc  input
+stderr:
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1402: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1403:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
 stderr:
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -98048,7 +98138,24 @@
 2.1: syntax error, unexpected '+'
 Error: popping nterm input (1.1-2.0: )
 Cleanup: discarding lookahead token '+' (2.1: )
-input:
+1.11-17: error: null divisor
+./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.6: syntax error: invalid character: '#'
+stderr:
+1.11-17: error: null divisor
+./calc.at:1402: cat stderr
+./calc.at:1405: cat stderr
 ./calc.at:1408: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -98059,42 +98166,169 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (1 + # + 1) = 1111
+./calc.at:1409: cat stderr
+./calc.at:1408: cat stderr
+./calc.at:1403: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1408:  $PREPARSER ./calc  /dev/null
+./calc.at:1407: cat stderr
+512. calc.at:1402:  ok
+stderr:
+input:
+  | 1 = 2 = 3
+./calc.at:1409:  $PREPARSER ./calc  input
+input:
+  | (# + 1) = 1111
+stderr:
+./calc.at:1405:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+./calc.at:1403: cat stderr
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (!!) + (1 2) = 1
 ./calc.at:1407:  $PREPARSER ./calc  input
 stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+513. calc.at:1403: stderr:
+ ok
+./calc.at:1405: cat stderr
+stderr:
+
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+Starting parse
+Entering state 0
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 81):
+Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 12
+Entering state 8
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
-Error: popping token '+' (1.1: )
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 16
+Reducing stack 0 by rule 16 (line 121):
+   $1 = token '!' (1.1: )
+   $2 = token '!' (1.1: )
 Shifting token error (1.1: )
 Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
 Reading a token
 Next token is token '+' (1.1: )
-Error: discarding token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
 Next token is token "number" (1.1: 1)
-Error: discarding token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.1: 2)
+syntax error
+Error: popping nterm exp (1.1: 1)
+Shifting token error (1.1: )
+Entering state 11
+Next token is token "number" (1.1: 2)
+Error: discarding token "number" (1.1: 2)
 Reading a token
 Next token is token ')' (1.1: )
 Entering state 11
@@ -98106,32 +98340,40 @@
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
-Entering state 8
+Entering state 30
 Reading a token
 Next token is token '=' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '=' (1.1: )
 Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 28
 Reading a token
 Next token is token '\n' (1.1: )
 Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $3 = nterm exp (1.1: 1)
+error: 2222 != 1
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Next token is token '\n' (1.1: )
 Shifting token '\n' (1.1: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -98146,9 +98388,7 @@
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
 ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1408: cat stderr
 stderr:
-./calc.at:1408:  $PREPARSER ./calc  /dev/null
 Starting parse
 Entering state 0
 Reading a token
@@ -98156,6 +98396,37 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 16
+Reducing stack 0 by rule 16 (line 121):
+   $1 = token '!' (1.1: )
+   $2 = token '!' (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Reading a token
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
@@ -98164,24 +98435,13 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error (1.1: )
-Error: popping token '+' (1.1: )
+Next token is token "number" (1.1: 2)
+syntax error
 Error: popping nterm exp (1.1: 1)
 Shifting token error (1.1: )
 Entering state 11
-Next token is token error (1.1: )
-Error: discarding token error (1.1: )
-Reading a token
-Next token is token '+' (1.1: )
-Error: discarding token '+' (1.1: )
-Reading a token
-Next token is token "number" (1.1: 1)
-Error: discarding token "number" (1.1: 1)
+Next token is token "number" (1.1: 2)
+Error: discarding token "number" (1.1: 2)
 Reading a token
 Next token is token ')' (1.1: )
 Entering state 11
@@ -98193,32 +98453,40 @@
    $2 = token error (1.1: )
    $3 = token ')' (1.1: )
 -> $$ = nterm exp (1.1: 1111)
-Entering state 8
+Entering state 30
 Reading a token
 Next token is token '=' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '=' (1.1: )
 Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.1: 1111)
-Shifting token "number" (1.1: 1111)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 28
 Reading a token
 Next token is token '\n' (1.1: )
 Reducing stack 0 by rule 6 (line 82):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '=' (1.1: )
-   $3 = nterm exp (1.1: 1111)
--> $$ = nterm exp (1.1: 1111)
+   $3 = nterm exp (1.1: 1)
+error: 2222 != 1
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Next token is token '\n' (1.1: )
 Shifting token '\n' (1.1: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 1111)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -98232,22 +98500,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1407: "$PERL" -pi -e 'use strict;
+./calc.at:1409: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -98257,6 +98510,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
 ./calc.at:1408: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -98267,14 +98521,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | (1 + # + 1) = 1111
+./calc.at:1405:  $PREPARSER ./calc  input
+./calc.at:1409: cat stderr
+stderr:
 ./calc.at:1408: cat stderr
-./calc.at:1407: cat stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
 input:
+stderr:
 input:
+1.6: syntax error: invalid character: '#'
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-  | (1 + 1) / (1 - 1)
 ./calc.at:1408:  $PREPARSER ./calc  input
-./calc.at:1407:  $PREPARSER ./calc  input
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 
+  | +1
+./calc.at:1409:  $PREPARSER ./calc  input
 stderr:
 stderr:
 Starting parse
@@ -98524,125 +98797,33 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1405: cat stderr
 ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 30
-Reading a token
-Next token is token ')' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 2)
-Entering state 12
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 118):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.1: 2)
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 2)
-Entering state 8
-Reading a token
-Next token is token '/' (1.1: )
-Shifting token '/' (1.1: )
-Entering state 23
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 81):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 29
-Reading a token
-Next token is token ')' (1.1: )
-Reducing stack 0 by rule 8 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '-' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 0)
-Entering state 12
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 118):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.1: 0)
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 0)
-Entering state 32
-Reading a token
-Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 10 (line 101):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '/' (1.1: )
-   $3 = nterm exp (1.1: 0)
-error: null divisor
--> $$ = nterm exp (1.1: 2)
-Entering state 8
-Next token is token '\n' (1.1: )
-Shifting token '\n' (1.1: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '\n' (1.1: )
--> $$ = nterm line (1.1: )
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
 Entering state 7
-Reducing stack 0 by rule 1 (line 71):
-   $1 = nterm line (1.1: )
--> $$ = nterm input (1.1: )
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (1.1: )
-Entering state 17
-Cleanup: popping token "end of input" (1.1: )
-Cleanup: popping nterm input (1.1: )
-./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1407: cat stderr
 stderr:
+520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1414: mv calc.y.tmp calc.y
+
 Starting parse
 Entering state 0
 Reading a token
@@ -98890,6 +99071,58 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+  | (1 + 1) / (1 - 1)
+./calc.at:1405:  $PREPARSER ./calc  input
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
+input:
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.11-17: error: null divisor
+./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (- *) + (1 2) = 1
+./calc.at:1407:  $PREPARSER ./calc  input
+./calc.at:1409: cat stderr
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -98897,6 +99130,42 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 2
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 120):
+   $1 = token '-' (1.1: )
+   $2 = token error (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
@@ -98905,9 +99174,36 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.1: )
-Shifting token '+' (1.1: )
-Entering state 21
+Next token is token "number" (1.1: 2)
+syntax error
+Error: popping nterm exp (1.1: 1)
+Shifting token error (1.1: )
+Entering state 11
+Next token is token "number" (1.1: 2)
+Error: discarding token "number" (1.1: 2)
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
@@ -98915,28 +99211,78 @@
 Reducing stack 0 by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 30
+Entering state 28
 Reading a token
-Next token is token ')' (1.1: )
-Reducing stack 0 by rule 7 (line 98):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.1: )
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '=' (1.1: )
    $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 2)
-Entering state 12
+error: 2222 != 1
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+./calc.at:1408: cat stderr
+./calc.at:1409:  $PREPARSER ./calc  /dev/null
+1.11-17: error: null divisor
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 2
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 120):
+   $1 = token '-' (1.1: )
+   $2 = token error (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
 Next token is token ')' (1.1: )
 Shifting token ')' (1.1: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 118):
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.1: 2)
+   $2 = token error (1.1: )
    $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 2)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
 Reading a token
-Next token is token '/' (1.1: )
-Shifting token '/' (1.1: )
-Entering state 23
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
@@ -98950,9 +99296,36 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 12
 Reading a token
-Next token is token '-' (1.1: )
-Shifting token '-' (1.1: )
-Entering state 20
+Next token is token "number" (1.1: 2)
+syntax error
+Error: popping nterm exp (1.1: 1)
+Shifting token error (1.1: )
+Entering state 11
+Next token is token "number" (1.1: 2)
+Error: discarding token "number" (1.1: 2)
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
 Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
@@ -98960,38 +99333,21 @@
 Reducing stack 0 by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
-Entering state 29
-Reading a token
-Next token is token ')' (1.1: )
-Reducing stack 0 by rule 8 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '-' (1.1: )
-   $3 = nterm exp (1.1: 1)
--> $$ = nterm exp (1.1: 0)
-Entering state 12
-Next token is token ')' (1.1: )
-Shifting token ')' (1.1: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 118):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.1: 0)
-   $3 = token ')' (1.1: )
--> $$ = nterm exp (1.1: 0)
-Entering state 32
+Entering state 28
 Reading a token
 Next token is token '\n' (1.1: )
-Reducing stack 0 by rule 10 (line 101):
-   $1 = nterm exp (1.1: 2)
-   $2 = token '/' (1.1: )
-   $3 = nterm exp (1.1: 0)
-error: null divisor
--> $$ = nterm exp (1.1: 2)
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1)
+error: 2222 != 1
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
 Next token is token '\n' (1.1: )
 Shifting token '\n' (1.1: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 77):
-   $1 = nterm exp (1.1: 2)
+   $1 = nterm exp (1.1: 2222)
    $2 = token '\n' (1.1: )
 -> $$ = nterm line (1.1: )
 Entering state 7
@@ -99005,31 +99361,20 @@
 Entering state 17
 Cleanup: popping token "end of input" (1.1: )
 Cleanup: popping nterm input (1.1: )
-./calc.at:1408: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1408: cat stderr
-./calc.at:1407: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1407: cat stderr
+521. calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1416: mv calc.y.tmp calc.y
+
+./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y
+stderr:
 input:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
   | (!!) + (1 2) = 1
-515. calc.at:1407:  ok
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1408:  $PREPARSER ./calc  input
 stderr:
 Starting parse
@@ -99144,11 +99489,18 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
 stderr:
 Starting parse
 Entering state 0
 Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+stderr:
+./calc.at:1405: cat stderr
+Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
@@ -99257,6 +99609,16 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1408: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -99268,9 +99630,27 @@
   }eg
 ' expout || exit 77
 ./calc.at:1408: cat stderr
+514. calc.at:1405:  ok
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
+./calc.at:1409: cat stderr
   | (- *) + (1 2) = 1
 ./calc.at:1408:  $PREPARSER ./calc  input
+./calc.at:1407: cat stderr
+input:
+stderr:
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1409:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -99389,13 +99769,624 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-523. calc.at:1431: testing Calculator C++   ...
-./calc.at:1431: mv calc.y.tmp calc.y
+  | (* *) + (*) + (*)
+./calc.at:1407:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 
 stderr:
-./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 3333)
+Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 3333)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-stdout:
 Starting parse
 Entering state 0
 Reading a token
@@ -99512,7 +100503,627 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: "$PERL" -ne '
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.1: )
+syntax error
+Shifting token error (1.1: )
+Entering state 11
+Next token is token '*' (1.1: )
+Error: discarding token '*' (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 3333)
+Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 3333)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1414: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+./calc.at:1409: cat stderr
+./calc.at:1408: cat stderr
+input:
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1416: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS
+input:
+  | (* *) + (*) + (*)
+./calc.at:1408:  $PREPARSER ./calc  input
+  | (!!) + (1 2) = 1
+stderr:
+./calc.at:1409:  $PREPARSER ./calc  input
+./calc.at:1407: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Reading a token
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Reading a token
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Reducing stack 0 by rule 16 (line 128):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
+Entering state 11
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
+Entering state 11
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
+Reading a token
+Next token is token ')' (1.12: )
+Entering state 11
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Reading a token
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Reducing stack 0 by rule 16 (line 128):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
+Entering state 11
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
+Entering state 11
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
+Reading a token
+Next token is token ')' (1.12: )
+Entering state 11
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+522. calc.at:1426: testing Calculator lalr1.cc %header  ...
+./calc.at:1426: mv calc.y.tmp calc.y
+
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Reading a token
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+stdout:
+./calc.at:1411: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -99523,7 +101134,10 @@
         || /\t/
         )' calc.c calc.h
 
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1407:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 input:
 ./calc.at:1408: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -99535,7 +101149,72 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stdout:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
+Entering state 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 122):
+   $1 = token '!' (1.1: )
+   $2 = token '+' (1.1: )
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -99549,20 +101228,222 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1409:  $PREPARSER ./calc  input
-./calc.at:1411: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
+./calc.at:1411:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
+Entering state 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 122):
+   $1 = token '!' (1.1: )
+   $2 = token '+' (1.1: )
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1407:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
+Entering state 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 123):
+   $1 = token '!' (1.1: )
+   $2 = token '-' (1.1: )
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1408: cat stderr
 stderr:
+stderr:
+./calc.at:1409: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
+Entering state 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 123):
+   $1 = token '!' (1.1: )
+   $2 = token '-' (1.1: )
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
 Starting parse
 Entering state 0
 Reading a token
@@ -99599,14 +101480,14 @@
 Entering state 31
 Reading a token
 Next token is token '=' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
+Reducing stack 0 by rule 9 (line 101):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
@@ -99681,20 +101562,20 @@
 Entering state 10
 Reading a token
 Next token is token '=' (2.12: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (2.9: )
    $2 = nterm exp (2.10: 3)
 -> $$ = nterm exp (2.9-10: -3)
 Entering state 31
 Next token is token '=' (2.12: )
-Reducing stack 0 by rule 9 (line 107):
+Reducing stack 0 by rule 9 (line 101):
    $1 = nterm exp (2.5: 2)
    $2 = token '*' (2.7: )
    $3 = nterm exp (2.9-10: -3)
 -> $$ = nterm exp (2.5-10: -6)
 Entering state 30
 Next token is token '=' (2.12: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (2.1: 1)
    $2 = token '+' (2.3: )
    $3 = nterm exp (2.5-10: -6)
@@ -99717,7 +101598,7 @@
 Entering state 10
 Reading a token
 Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (2.14: )
    $2 = nterm exp (2.15: 5)
 -> $$ = nterm exp (2.14-15: -5)
@@ -99781,14 +101662,14 @@
 Entering state 33
 Reading a token
 Next token is token '=' (4.6: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (4.2: 1)
    $2 = token '^' (4.3: )
    $3 = nterm exp (4.4: 2)
 -> $$ = nterm exp (4.2-4: 1)
 Entering state 10
 Next token is token '=' (4.6: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (4.1: )
    $2 = nterm exp (4.2-4: 1)
 -> $$ = nterm exp (4.1-4: -1)
@@ -99810,7 +101691,7 @@
 Entering state 10
 Reading a token
 Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (4.8: )
    $2 = nterm exp (4.9: 1)
 -> $$ = nterm exp (4.8-9: -1)
@@ -99853,7 +101734,7 @@
 Entering state 10
 Reading a token
 Next token is token ')' (5.4: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (5.2: )
    $2 = nterm exp (5.3: 1)
 -> $$ = nterm exp (5.2-3: -1)
@@ -99861,7 +101742,7 @@
 Next token is token ')' (5.4: )
 Shifting token ')' (5.4: )
 Entering state 27
-Reducing stack 0 by rule 13 (line 125):
+Reducing stack 0 by rule 13 (line 113):
    $1 = token '(' (5.1: )
    $2 = nterm exp (5.2-3: -1)
    $3 = token ')' (5.4: )
@@ -99881,7 +101762,7 @@
 Entering state 33
 Reading a token
 Next token is token '=' (5.8: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (5.1-4: -1)
    $2 = token '^' (5.5: )
    $3 = nterm exp (5.6: 2)
@@ -99954,19 +101835,19 @@
 Entering state 10
 Reading a token
 Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (7.3: )
    $2 = nterm exp (7.4: 1)
 -> $$ = nterm exp (7.3-4: -1)
 Entering state 10
 Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (7.2: )
    $2 = nterm exp (7.3-4: -1)
 -> $$ = nterm exp (7.2-4: 1)
 Entering state 10
 Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (7.1: )
    $2 = nterm exp (7.2-4: 1)
 -> $$ = nterm exp (7.1-4: -1)
@@ -99988,7 +101869,7 @@
 Entering state 10
 Reading a token
 Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (7.8: )
    $2 = nterm exp (7.9: 1)
 -> $$ = nterm exp (7.8-9: -1)
@@ -100048,7 +101929,7 @@
 Entering state 29
 Reading a token
 Next token is token '-' (9.7: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (9.1: 1)
    $2 = token '-' (9.3: )
    $3 = nterm exp (9.5: 2)
@@ -100067,7 +101948,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' (9.11: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (9.1-5: -1)
    $2 = token '-' (9.7: )
    $3 = nterm exp (9.9: 3)
@@ -100090,7 +101971,7 @@
 Entering state 10
 Reading a token
 Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (9.13: )
    $2 = nterm exp (9.14: 4)
 -> $$ = nterm exp (9.13-14: -4)
@@ -100153,7 +102034,7 @@
 Entering state 29
 Reading a token
 Next token is token ')' (10.11: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (10.6: 2)
    $2 = token '-' (10.8: )
    $3 = nterm exp (10.10: 3)
@@ -100162,7 +102043,7 @@
 Next token is token ')' (10.11: )
 Shifting token ')' (10.11: )
 Entering state 27
-Reducing stack 0 by rule 13 (line 125):
+Reducing stack 0 by rule 13 (line 113):
    $1 = token '(' (10.5: )
    $2 = nterm exp (10.6-10: -1)
    $3 = token ')' (10.11: )
@@ -100170,7 +102051,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' (10.13: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (10.1: 1)
    $2 = token '-' (10.3: )
    $3 = nterm exp (10.5-11: -1)
@@ -100255,14 +102136,14 @@
 Entering state 33
 Reading a token
 Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (12.3: 2)
    $2 = token '^' (12.4: )
    $3 = nterm exp (12.5: 3)
 -> $$ = nterm exp (12.3-5: 8)
 Entering state 33
 Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (12.1: 2)
    $2 = token '^' (12.2: )
    $3 = nterm exp (12.3-5: 8)
@@ -100326,7 +102207,7 @@
 Entering state 33
 Reading a token
 Next token is token ')' (13.5: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (13.2: 2)
    $2 = token '^' (13.3: )
    $3 = nterm exp (13.4: 2)
@@ -100335,7 +102216,7 @@
 Next token is token ')' (13.5: )
 Shifting token ')' (13.5: )
 Entering state 27
-Reducing stack 0 by rule 13 (line 125):
+Reducing stack 0 by rule 13 (line 113):
    $1 = token '(' (13.1: )
    $2 = nterm exp (13.2-4: 4)
    $3 = token ')' (13.5: )
@@ -100355,7 +102236,7 @@
 Entering state 33
 Reading a token
 Next token is token '=' (13.9: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (13.1-5: 4)
    $2 = token '^' (13.6: )
    $3 = nterm exp (13.7: 3)
@@ -100399,23 +102280,11 @@
 Entering state 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1411:  $PREPARSER ./calc  input
 input:
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1408:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -100452,20 +102321,86 @@
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Reading a token
-Next token is token '=' (1.11: )
+Next token is token '+' (1.11: )
 Reducing stack 0 by rule 9 (line 107):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
-Next token is token '=' (1.11: )
+Next token is token '+' (1.11: )
 Reducing stack 0 by rule 7 (line 105):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
 -> $$ = nterm exp (1.1-9: 7)
 Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 129):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
 Next token is token '=' (1.11: )
 Shifting token '=' (1.11: )
 Entering state 19
@@ -100535,20 +102470,20 @@
 Entering state 10
 Reading a token
 Next token is token '=' (2.12: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (2.9: )
    $2 = nterm exp (2.10: 3)
 -> $$ = nterm exp (2.9-10: -3)
 Entering state 31
 Next token is token '=' (2.12: )
-Reducing stack 0 by rule 9 (line 107):
+Reducing stack 0 by rule 9 (line 101):
    $1 = nterm exp (2.5: 2)
    $2 = token '*' (2.7: )
    $3 = nterm exp (2.9-10: -3)
 -> $$ = nterm exp (2.5-10: -6)
 Entering state 30
 Next token is token '=' (2.12: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (2.1: 1)
    $2 = token '+' (2.3: )
    $3 = nterm exp (2.5-10: -6)
@@ -100571,7 +102506,7 @@
 Entering state 10
 Reading a token
 Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (2.14: )
    $2 = nterm exp (2.15: 5)
 -> $$ = nterm exp (2.14-15: -5)
@@ -100635,14 +102570,14 @@
 Entering state 33
 Reading a token
 Next token is token '=' (4.6: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (4.2: 1)
    $2 = token '^' (4.3: )
    $3 = nterm exp (4.4: 2)
 -> $$ = nterm exp (4.2-4: 1)
 Entering state 10
 Next token is token '=' (4.6: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (4.1: )
    $2 = nterm exp (4.2-4: 1)
 -> $$ = nterm exp (4.1-4: -1)
@@ -100664,7 +102599,7 @@
 Entering state 10
 Reading a token
 Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (4.8: )
    $2 = nterm exp (4.9: 1)
 -> $$ = nterm exp (4.8-9: -1)
@@ -100707,7 +102642,7 @@
 Entering state 10
 Reading a token
 Next token is token ')' (5.4: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (5.2: )
    $2 = nterm exp (5.3: 1)
 -> $$ = nterm exp (5.2-3: -1)
@@ -100715,7 +102650,7 @@
 Next token is token ')' (5.4: )
 Shifting token ')' (5.4: )
 Entering state 27
-Reducing stack 0 by rule 13 (line 125):
+Reducing stack 0 by rule 13 (line 113):
    $1 = token '(' (5.1: )
    $2 = nterm exp (5.2-3: -1)
    $3 = token ')' (5.4: )
@@ -100735,7 +102670,7 @@
 Entering state 33
 Reading a token
 Next token is token '=' (5.8: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (5.1-4: -1)
    $2 = token '^' (5.5: )
    $3 = nterm exp (5.6: 2)
@@ -100808,19 +102743,19 @@
 Entering state 10
 Reading a token
 Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (7.3: )
    $2 = nterm exp (7.4: 1)
 -> $$ = nterm exp (7.3-4: -1)
 Entering state 10
 Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (7.2: )
    $2 = nterm exp (7.3-4: -1)
 -> $$ = nterm exp (7.2-4: 1)
 Entering state 10
 Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (7.1: )
    $2 = nterm exp (7.2-4: 1)
 -> $$ = nterm exp (7.1-4: -1)
@@ -100842,7 +102777,7 @@
 Entering state 10
 Reading a token
 Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (7.8: )
    $2 = nterm exp (7.9: 1)
 -> $$ = nterm exp (7.8-9: -1)
@@ -100902,7 +102837,7 @@
 Entering state 29
 Reading a token
 Next token is token '-' (9.7: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (9.1: 1)
    $2 = token '-' (9.3: )
    $3 = nterm exp (9.5: 2)
@@ -100921,7 +102856,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' (9.11: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (9.1-5: -1)
    $2 = token '-' (9.7: )
    $3 = nterm exp (9.9: 3)
@@ -100944,7 +102879,7 @@
 Entering state 10
 Reading a token
 Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 11 (line 123):
+Reducing stack 0 by rule 11 (line 111):
    $1 = token '-' (9.13: )
    $2 = nterm exp (9.14: 4)
 -> $$ = nterm exp (9.13-14: -4)
@@ -101007,7 +102942,7 @@
 Entering state 29
 Reading a token
 Next token is token ')' (10.11: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (10.6: 2)
    $2 = token '-' (10.8: )
    $3 = nterm exp (10.10: 3)
@@ -101016,7 +102951,7 @@
 Next token is token ')' (10.11: )
 Shifting token ')' (10.11: )
 Entering state 27
-Reducing stack 0 by rule 13 (line 125):
+Reducing stack 0 by rule 13 (line 113):
    $1 = token '(' (10.5: )
    $2 = nterm exp (10.6-10: -1)
    $3 = token ')' (10.11: )
@@ -101024,7 +102959,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' (10.13: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (10.1: 1)
    $2 = token '-' (10.3: )
    $3 = nterm exp (10.5-11: -1)
@@ -101109,14 +103044,14 @@
 Entering state 33
 Reading a token
 Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (12.3: 2)
    $2 = token '^' (12.4: )
    $3 = nterm exp (12.5: 3)
 -> $$ = nterm exp (12.3-5: 8)
 Entering state 33
 Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (12.1: 2)
    $2 = token '^' (12.2: )
    $3 = nterm exp (12.3-5: 8)
@@ -101180,7 +103115,7 @@
 Entering state 33
 Reading a token
 Next token is token ')' (13.5: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (13.2: 2)
    $2 = token '^' (13.3: )
    $3 = nterm exp (13.4: 2)
@@ -101189,7 +103124,7 @@
 Next token is token ')' (13.5: )
 Shifting token ')' (13.5: )
 Entering state 27
-Reducing stack 0 by rule 13 (line 125):
+Reducing stack 0 by rule 13 (line 113):
    $1 = token '(' (13.1: )
    $2 = nterm exp (13.2-4: 4)
    $3 = token ')' (13.5: )
@@ -101209,7 +103144,7 @@
 Entering state 33
 Reading a token
 Next token is token '=' (13.9: )
-Reducing stack 0 by rule 12 (line 124):
+Reducing stack 0 by rule 12 (line 112):
    $1 = nterm exp (13.1-5: 4)
    $2 = token '^' (13.6: )
    $3 = nterm exp (13.7: 3)
@@ -101253,10 +103188,13 @@
 Entering state 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-stderr:
 input:
-  | (* *) + (*) + (*)
-./calc.at:1408:  $PREPARSER ./calc  input
+input:
+stderr:
+  | (- *) + (1 2) = 1
+./calc.at:1409:  $PREPARSER ./calc  input
+  | 1 2
+./calc.at:1411:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -101292,811 +103230,702 @@
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
 -> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.13: 7)
-Shifting token "number" (1.13: 7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 28
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 129):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
+stderr:
+input:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (2.1: 1)
-Shifting token "number" (2.1: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.1: 1)
--> $$ = nterm exp (2.1: 1)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (2.5: 2)
-Shifting token "number" (2.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 30
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 22
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
 Entering state 2
 Reading a token
-Next token is token "number" (2.10: 3)
-Shifting token "number" (2.10: 3)
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 127):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
 Reading a token
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 31
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
+Entering state 11
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
+Reading a token
+Next token is token ')' (1.13: )
+Entering state 11
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
 Entering state 30
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
 Entering state 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 19
 Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Reading a token
-Next token is token "number" (2.15: 5)
-Shifting token "number" (2.15: 5)
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 28
-Next token is token '\n' (2.16-3.0: )
+Reading a token
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1408:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (4.2: 1)
-Shifting token "number" (4.2: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 24
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token "number" (4.4: 2)
-Shifting token "number" (4.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 33
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 19
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
 Entering state 2
 Reading a token
-Next token is token "number" (4.9: 1)
-Shifting token "number" (4.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 127):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 28
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
 Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
-Entering state 4
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
 Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
 Reading a token
-Next token is token "number" (5.3: 1)
-Shifting token "number" (5.3: 1)
+Next token is token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
 Entering state 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
 Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 24
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
+Entering state 11
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
 Reading a token
-Next token is token "number" (5.6: 2)
-Shifting token "number" (5.6: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 33
+Next token is token ')' (1.13: )
+Entering state 11
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 30
 Reading a token
-Next token is token '=' (5.8: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
 Entering state 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 19
 Reading a token
-Next token is token "number" (5.10: 1)
-Shifting token "number" (5.10: 1)
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.10: 1)
--> $$ = nterm exp (5.10: 1)
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 28
 Reading a token
-Next token is token '\n' (5.11-6.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (7.4: 1)
-Shifting token "number" (7.4: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 19
 Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Next token is token "number" (7.9: 1)
-Shifting token "number" (7.9: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 28
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
 Reading a token
-Next token is token "number" (9.1: 1)
-Shifting token "number" (9.1: 1)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.1: 1)
--> $$ = nterm exp (9.1: 1)
-Entering state 8
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
 Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 20
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Next token is token "number" (9.5: 2)
-Shifting token "number" (9.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 29
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token '-' (9.7: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 20
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 130):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1426: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1411: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1407: cat stderr
+stderr:
+./calc.at:1409: cat stderr
+./calc.at:1411: cat stderr
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (9.9: 3)
-Shifting token "number" (9.9: 3)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 29
-Reading a token
-Next token is token '=' (9.11: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 19
 Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Next token is token "number" (9.14: 4)
-Shifting token "number" (9.14: 4)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
-Entering state 28
-Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
-Entering state 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
-Entering state 6
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
 Reading a token
-Next token is token "number" (10.1: 1)
-Shifting token "number" (10.1: 1)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.1: 1)
--> $$ = nterm exp (10.1: 1)
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 20
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
-Entering state 4
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 130):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+input:
+input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1407:  $PREPARSER ./calc  input
+input:
+  | 1//2
+./calc.at:1411:  $PREPARSER ./calc  input
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+  | (* *) + (*) + (*)
+./calc.at:1409:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (10.6: 2)
-Shifting token "number" (10.6: 2)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 20
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
-Next token is token "number" (10.10: 3)
-Shifting token "number" (10.10: 3)
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 29
-Reading a token
-Next token is token ')' (10.11: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
-Entering state 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 29
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
 Reading a token
-Next token is token '=' (10.13: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
-Entering state 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
-Entering state 19
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
 Reading a token
-Next token is token "number" (10.15: 2)
-Shifting token "number" (10.15: 2)
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.15: 2)
--> $$ = nterm exp (10.15: 2)
-Entering state 28
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
 Reading a token
-Next token is token '\n' (10.16-11.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
 Entering state 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
-Entering state 6
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
-Entering state 6
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
 Reading a token
-Next token is token "number" (12.1: 2)
-Shifting token "number" (12.1: 2)
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 124):
+   $1 = token '!' (1.1: )
+   $2 = token '*' (1.1: )
+memory exhausted
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.1: 2)
--> $$ = nterm exp (12.1: 2)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 24
-Reading a token
-Next token is token "number" (12.3: 2)
-Shifting token "number" (12.3: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 33
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
 Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 24
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (12.5: 3)
-Shifting token "number" (12.5: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 33
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
-Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 33
-Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
-Entering state 19
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
 Reading a token
-Next token is token "number" (12.9-11: 256)
-Shifting token "number" (12.9-11: 256)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
-Entering state 28
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token '\n' (12.12-13.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
 Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
-Entering state 4
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
 Reading a token
-Next token is token "number" (13.2: 2)
-Shifting token "number" (13.2: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.2: 2)
--> $$ = nterm exp (13.2: 2)
-Entering state 12
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
 Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 24
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
 Reading a token
-Next token is token "number" (13.4: 2)
-Shifting token "number" (13.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 33
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
 Reading a token
-Next token is token ')' (13.5: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
 Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 24
-Reading a token
-Next token is token "number" (13.7: 3)
-Shifting token "number" (13.7: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 33
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
 Reading a token
-Next token is token '=' (13.9: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
-Entering state 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
-Entering state 19
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
 Reading a token
-Next token is token "number" (13.11-12: 64)
-Shifting token "number" (13.11-12: 64)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
-Entering state 28
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
 Reading a token
-Next token is token '\n' (13.13-14.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (14.1: )
+Shifting token "end of input" (2.1: )
 Entering state 17
-Cleanup: popping token "end of input" (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 2
-./calc.at:1409:  $PREPARSER ./calc  input
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1408: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -102104,16 +103933,88 @@
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 2)
+Shifting token "number" (1.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2)
+-> $$ = nterm exp (1.1: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.1: 3)
+Shifting token "number" (1.1: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 3)
+-> $$ = nterm exp (1.1: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 9 (line 100):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '*' (1.1: )
+   $3 = nterm exp (1.1: 3)
+-> $$ = nterm exp (1.1: 6)
+Entering state 30
+Next token is token '+' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 6)
+-> $$ = nterm exp (1.1: 7)
+Entering state 8
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.1: )
+Shifting token '!' (1.1: )
+Entering state 5
+Reading a token
+Next token is token '*' (1.1: )
+Shifting token '*' (1.1: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 124):
+   $1 = token '!' (1.1: )
+   $2 = token '*' (1.1: )
+memory exhausted
+Cleanup: popping token '+' (1.1: )
+Cleanup: popping nterm exp (1.1: 7)
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
 Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Cleanup: discarding lookahead token '/' (1.3: )
+stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -102229,9 +104130,19 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1408:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1411: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -102267,968 +104178,145 @@
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
 -> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.13: 7)
-Shifting token "number" (1.13: 7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 28
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1411: cat stderr
+./calc.at:1409: cat stderr
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (2.1: 1)
-Shifting token "number" (2.1: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.1: 1)
--> $$ = nterm exp (2.1: 1)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Reading a token
-Next token is token "number" (2.5: 2)
-Shifting token "number" (2.5: 2)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.5: 2)
--> $$ = nterm exp (2.5: 2)
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 30
 Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
 Entering state 22
 Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Reading a token
-Next token is token "number" (2.10: 3)
-Shifting token "number" (2.10: 3)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
 Entering state 31
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
 Entering state 30
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 19
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token "number" (2.15: 5)
-Shifting token "number" (2.15: 5)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 28
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Reading a token
-Next token is token "number" (4.2: 1)
-Shifting token "number" (4.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 24
-Reading a token
-Next token is token "number" (4.4: 2)
-Shifting token "number" (4.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 33
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 19
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Reading a token
-Next token is token "number" (4.9: 1)
-Shifting token "number" (4.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
-Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 28
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
-Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Reading a token
-Next token is token "number" (5.3: 1)
-Shifting token "number" (5.3: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 24
-Reading a token
-Next token is token "number" (5.6: 2)
-Shifting token "number" (5.6: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 33
-Reading a token
-Next token is token '=' (5.8: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 19
-Reading a token
-Next token is token "number" (5.10: 1)
-Shifting token "number" (5.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
-Entering state 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
-Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Reading a token
-Next token is token "number" (7.4: 1)
-Shifting token "number" (7.4: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 19
-Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Reading a token
-Next token is token "number" (7.9: 1)
-Shifting token "number" (7.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 28
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
-Reading a token
-Next token is token "number" (9.1: 1)
-Shifting token "number" (9.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.1: 1)
--> $$ = nterm exp (9.1: 1)
-Entering state 8
-Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 20
-Reading a token
-Next token is token "number" (9.5: 2)
-Shifting token "number" (9.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 29
-Reading a token
-Next token is token '-' (9.7: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 20
-Reading a token
-Next token is token "number" (9.9: 3)
-Shifting token "number" (9.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 29
-Reading a token
-Next token is token '=' (9.11: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
-Entering state 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 19
-Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
-Reading a token
-Next token is token "number" (9.14: 4)
-Shifting token "number" (9.14: 4)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
-Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
-Entering state 28
-Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
-Entering state 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
-Entering state 6
-Reading a token
-Next token is token "number" (10.1: 1)
-Shifting token "number" (10.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.1: 1)
--> $$ = nterm exp (10.1: 1)
-Entering state 8
-Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 20
-Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
-Entering state 4
-Reading a token
-Next token is token "number" (10.6: 2)
-Shifting token "number" (10.6: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
-Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 20
-Reading a token
-Next token is token "number" (10.10: 3)
-Shifting token "number" (10.10: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 29
-Reading a token
-Next token is token ')' (10.11: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
-Entering state 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 29
-Reading a token
-Next token is token '=' (10.13: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
-Entering state 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
-Entering state 19
-Reading a token
-Next token is token "number" (10.15: 2)
-Shifting token "number" (10.15: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.15: 2)
--> $$ = nterm exp (10.15: 2)
-Entering state 28
-Reading a token
-Next token is token '\n' (10.16-11.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
-Entering state 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
-Entering state 6
-Reading a token
-Next token is token "number" (12.1: 2)
-Shifting token "number" (12.1: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.1: 2)
--> $$ = nterm exp (12.1: 2)
-Entering state 8
-Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 24
-Reading a token
-Next token is token "number" (12.3: 2)
-Shifting token "number" (12.3: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 33
-Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 24
-Reading a token
-Next token is token "number" (12.5: 3)
-Shifting token "number" (12.5: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 33
-Reading a token
-Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 33
-Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
-Entering state 19
-Reading a token
-Next token is token "number" (12.9-11: 256)
-Shifting token "number" (12.9-11: 256)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
-Entering state 28
-Reading a token
-Next token is token '\n' (12.12-13.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
-Entering state 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
-Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (13.2: 2)
-Shifting token "number" (13.2: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.2: 2)
--> $$ = nterm exp (13.2: 2)
-Entering state 12
-Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 24
-Reading a token
-Next token is token "number" (13.4: 2)
-Shifting token "number" (13.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 33
-Reading a token
-Next token is token ')' (13.5: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
-Entering state 8
-Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 24
-Reading a token
-Next token is token "number" (13.7: 3)
-Shifting token "number" (13.7: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 33
-Reading a token
-Next token is token '=' (13.9: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
-Entering state 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (13.11-12: 64)
-Shifting token "number" (13.11-12: 64)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
-Entering state 28
-Reading a token
-Next token is token '\n' (13.13-14.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
-Entering state 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (14.1: )
-Entering state 17
-Cleanup: popping token "end of input" (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
-stderr:
-./calc.at:1431: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1407: cat stderr
 input:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-  | 1 2
+input:
+  | error
 ./calc.at:1411:  $PREPARSER ./calc  input
+input:
 stderr:
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1409:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1407:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1408: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -103239,42 +104327,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1408: cat stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
-./calc.at:1409: cat stderr
-input:
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1411: cat stderr
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1408:  $PREPARSER ./calc  input
-  | 1//2
-./calc.at:1409:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -103341,54 +104394,115 @@
    $2 = token '+' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.1: 2222)
+Shifting token "number" (1.1: 2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2222)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 2222)
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1408: cat stderr
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-input:
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+stderr:
 stderr:
-  | 1//2
-./calc.at:1411:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -103454,99 +104568,119 @@
    $2 = token '+' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1408:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
 Entering state 30
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
+Next token is token '=' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 8
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
+Next token is token "number" (1.1: 2222)
+Shifting token "number" (1.1: 2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 2222)
+-> $$ = nterm exp (1.1: 2222)
+Entering state 28
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 2222)
+-> $$ = nterm exp (1.1: 2222)
 Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2222)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1411: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | 1 + 2 * 3 + !- ++
+input:
+./calc.at:1409:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1408:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -103613,62 +104747,10 @@
    $2 = token '-' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1408: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1409: cat stderr
-stdout:
-./calc.at:1413: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-./calc.at:1408: cat stderr
-./calc.at:1411: "$PERL" -pi -e 'use strict;
+./calc.at:1411: cat stderr
+./calc.at:1407: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -103678,116 +104760,104 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | error
-./calc.at:1409:  $PREPARSER ./calc  input
-input:
-input:
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1411: cat stderr
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !* ++
-./calc.at:1408:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1413:  $PREPARSER ./calc  input
-stderr:
-stderr:
-input:
-Starting parse
-Entering state 0
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-Starting parse
-Entering state 0
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
 Entering state 30
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 28
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
 Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 131):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | error
-./calc.at:1411:  $PREPARSER ./calc  input
-stderr:
-stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -103846,89 +104916,100 @@
 Shifting token '!' (1.13: )
 Entering state 5
 Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 131):
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 130):
    $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
+   $2 = token '-' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
 Entering state 30
+Reading a token
 Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
 Entering state 8
 Next token is token '=' (1.11: )
 Shifting token '=' (1.11: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.13: 7)
-Shifting token "number" (1.13: 7)
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.13: 7)
--> $$ = nterm exp (1.13: 7)
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.14-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1.1-9: 2222)
    $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -103936,769 +105017,185 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Next token is token "number" (2.1: 1)
-Shifting token "number" (2.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (2.5: 2)
-Shifting token "number" (2.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 22
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Reading a token
-Next token is token "number" (2.10: 3)
-Shifting token "number" (2.10: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 31
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
-Entering state 30
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 19
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Reading a token
-Next token is token "number" (2.15: 5)
-Shifting token "number" (2.15: 5)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 28
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Reading a token
-Next token is token "number" (4.2: 1)
-Shifting token "number" (4.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 24
-Reading a token
-Next token is token "number" (4.4: 2)
-Shifting token "number" (4.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 33
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 19
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Reading a token
-Next token is token "number" (4.9: 1)
-Shifting token "number" (4.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
-Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 28
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
-Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Reading a token
-Next token is token "number" (5.3: 1)
-Shifting token "number" (5.3: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 24
-Reading a token
-Next token is token "number" (5.6: 2)
-Shifting token "number" (5.6: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 33
-Reading a token
-Next token is token '=' (5.8: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 19
-Reading a token
-Next token is token "number" (5.10: 1)
-Shifting token "number" (5.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
-Entering state 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
-Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Reading a token
-Next token is token "number" (7.4: 1)
-Shifting token "number" (7.4: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 19
-Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Reading a token
-Next token is token "number" (7.9: 1)
-Shifting token "number" (7.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 28
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
-Reading a token
-Next token is token "number" (9.1: 1)
-Shifting token "number" (9.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.1: 1)
--> $$ = nterm exp (9.1: 1)
-Entering state 8
-Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 20
-Reading a token
-Next token is token "number" (9.5: 2)
-Shifting token "number" (9.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 29
-Reading a token
-Next token is token '-' (9.7: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 20
-Reading a token
-Next token is token "number" (9.9: 3)
-Shifting token "number" (9.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 29
-Reading a token
-Next token is token '=' (9.11: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
-Entering state 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 19
-Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
-Reading a token
-Next token is token "number" (9.14: 4)
-Shifting token "number" (9.14: 4)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
-Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
-Entering state 28
-Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
-Entering state 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
-Entering state 6
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 = 2 = 3
+./calc.at:1411:  $PREPARSER ./calc  input
+./calc.at:1407: cat stderr
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (10.1: 1)
-Shifting token "number" (10.1: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.1: 1)
--> $$ = nterm exp (10.1: 1)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 20
-Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
-Entering state 4
-Reading a token
-Next token is token "number" (10.6: 2)
-Shifting token "number" (10.6: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
-Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 20
-Reading a token
-Next token is token "number" (10.10: 3)
-Shifting token "number" (10.10: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 29
-Reading a token
-Next token is token ')' (10.11: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
-Entering state 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 29
-Reading a token
-Next token is token '=' (10.13: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
-Entering state 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
 Entering state 19
 Reading a token
-Next token is token "number" (10.15: 2)
-Shifting token "number" (10.15: 2)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.15: 2)
--> $$ = nterm exp (10.15: 2)
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 28
 Reading a token
-Next token is token '\n' (10.16-11.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
-Entering state 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
-Entering state 6
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+./calc.at:1409: cat stderr
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1408: cat stderr
+input:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (12.1: 2)
-Shifting token "number" (12.1: 2)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.1: 2)
--> $$ = nterm exp (12.1: 2)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 24
-Reading a token
-Next token is token "number" (12.3: 2)
-Shifting token "number" (12.3: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 33
-Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 24
-Reading a token
-Next token is token "number" (12.5: 3)
-Shifting token "number" (12.5: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 33
-Reading a token
-Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 33
-Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
 Entering state 19
 Reading a token
-Next token is token "number" (12.9-11: 256)
-Shifting token "number" (12.9-11: 256)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 28
 Reading a token
-Next token is token '\n' (12.12-13.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
-Entering state 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+  | (1 + #) = 1111
+./calc.at:1407:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !* ++
+./calc.at:1409:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (13.2: 2)
-Shifting token "number" (13.2: 2)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.2: 2)
--> $$ = nterm exp (13.2: 2)
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 12
 Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 24
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
-Next token is token "number" (13.4: 2)
-Shifting token "number" (13.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 33
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Error: popping token '+' (1.1: )
+Error: popping nterm exp (1.1: 1)
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
 Reading a token
-Next token is token ')' (13.5: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
 Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 24
-Reading a token
-Next token is token "number" (13.7: 3)
-Shifting token "number" (13.7: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 33
-Reading a token
-Next token is token '=' (13.9: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
-Entering state 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token "number" (13.11-12: 64)
-Shifting token "number" (13.11-12: 64)
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 28
 Reading a token
-Next token is token '\n' (13.13-14.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (14.1: )
+Shifting token "end of input" (1.1: )
 Entering state 17
-Cleanup: popping token "end of input" (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1409: "$PERL" -pi -e 'use strict;
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -104708,16 +105205,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-stderr:
-./calc.at:1409: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -104753,808 +105240,349 @@
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
 -> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.13: 7)
-Shifting token "number" (1.13: 7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token "number" (2.1: 1)
-Shifting token "number" (2.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
 Entering state 21
 Reading a token
-Next token is token "number" (2.5: 2)
-Shifting token "number" (2.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 22
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Reading a token
-Next token is token "number" (2.10: 3)
-Shifting token "number" (2.10: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 31
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
-Entering state 30
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 19
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Reading a token
-Next token is token "number" (2.15: 5)
-Shifting token "number" (2.15: 5)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 28
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Reading a token
-Next token is token "number" (4.2: 1)
-Shifting token "number" (4.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 24
-Reading a token
-Next token is token "number" (4.4: 2)
-Shifting token "number" (4.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 33
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 19
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Reading a token
-Next token is token "number" (4.9: 1)
-Shifting token "number" (4.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 28
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+input:
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1411: cat stderr
+stderr:
+  | (1 + #) = 1111
+./calc.at:1408:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Reading a token
-Next token is token "number" (5.3: 1)
-Shifting token "number" (5.3: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 24
-Reading a token
-Next token is token "number" (5.6: 2)
-Shifting token "number" (5.6: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 33
 Reading a token
-Next token is token '=' (5.8: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 19
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
 Reading a token
-Next token is token "number" (5.10: 1)
-Shifting token "number" (5.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 28
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Error: popping token '+' (1.1: )
+Error: popping nterm exp (1.1: 1)
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
 Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
-Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Reading a token
-Next token is token "number" (7.4: 1)
-Shifting token "number" (7.4: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
 Reading a token
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Reading a token
-Next token is token "number" (7.9: 1)
-Shifting token "number" (7.9: 1)
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 28
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
 Entering state 6
 Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (9.1: 1)
-Shifting token "number" (9.1: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.1: 1)
--> $$ = nterm exp (9.1: 1)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 20
-Reading a token
-Next token is token "number" (9.5: 2)
-Shifting token "number" (9.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 29
-Reading a token
-Next token is token '-' (9.7: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 20
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Next token is token "number" (9.9: 3)
-Shifting token "number" (9.9: 3)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 29
-Reading a token
-Next token is token '=' (9.11: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
-Entering state 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 19
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
 Reading a token
-Next token is token "number" (9.14: 4)
-Shifting token "number" (9.14: 4)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
 Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 11 (line 111):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
-Entering state 28
-Next token is token '\n' (9.15-10.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 107):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
-Entering state 6
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Next token is token "number" (10.1: 1)
-Shifting token "number" (10.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.1: 1)
--> $$ = nterm exp (10.1: 1)
-Entering state 8
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 20
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 131):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+input:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (10.6: 2)
-Shifting token "number" (10.6: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
-Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 20
-Reading a token
-Next token is token "number" (10.10: 3)
-Shifting token "number" (10.10: 3)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 29
-Reading a token
-Next token is token ')' (10.11: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 29
-Reading a token
-Next token is token '=' (10.13: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
-Entering state 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
-Entering state 19
-Reading a token
-Next token is token "number" (10.15: 2)
-Shifting token "number" (10.15: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (10.15: 2)
--> $$ = nterm exp (10.15: 2)
-Entering state 28
 Reading a token
-Next token is token '\n' (10.16-11.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
-Entering state 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
-Entering state 6
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
 Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
-Entering state 6
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
-Next token is token "number" (12.1: 2)
-Shifting token "number" (12.1: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.1: 2)
--> $$ = nterm exp (12.1: 2)
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 24
-Reading a token
-Next token is token "number" (12.3: 2)
-Shifting token "number" (12.3: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 33
-Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 24
-Reading a token
-Next token is token "number" (12.5: 3)
-Shifting token "number" (12.5: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 33
-Reading a token
-Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 33
-Next token is token '=' (12.7: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Reading a token
-Next token is token "number" (12.9-11: 256)
-Shifting token "number" (12.9-11: 256)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Reading a token
-Next token is token '\n' (12.12-13.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 
+  | +1
+./calc.at:1411:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (13.2: 2)
-Shifting token "number" (13.2: 2)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.2: 2)
--> $$ = nterm exp (13.2: 2)
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 24
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
 Reading a token
-Next token is token "number" (13.4: 2)
-Shifting token "number" (13.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 33
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
-Next token is token ')' (13.5: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 24
-Reading a token
-Next token is token "number" (13.7: 3)
-Shifting token "number" (13.7: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 33
-Reading a token
-Next token is token '=' (13.9: )
-Reducing stack 0 by rule 12 (line 112):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
-Entering state 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Reading a token
-Next token is token "number" (13.11-12: 64)
-Shifting token "number" (13.11-12: 64)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Reading a token
-Next token is token '\n' (13.13-14.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 18
-Reducing stack 0 by rule 2 (line 79):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (14.1: )
+Shifting token "end of input" (2.1: )
 Entering state 17
-Cleanup: popping token "end of input" (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1408: "$PERL" -pi -e 'use strict;
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1409: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -105564,10 +105592,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 2
-./calc.at:1413:  $PREPARSER ./calc  input
-./calc.at:1411: "$PERL" -pi -e 'use strict;
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1407: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -105577,114 +105623,43 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1409: cat stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-./calc.at:1411: cat stderr
-  | 1 = 2 = 3
-./calc.at:1408: cat stderr
-./calc.at:1409:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1411:  $PREPARSER ./calc  input
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | (#) + (#) = 2222
+./calc.at:1409:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1408:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1413: "$PERL" -pi -e 'use strict;
+./calc.at:1407: cat stderr
+./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -105694,7 +105669,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1408: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -105792,70 +105767,13 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1413: cat stderr
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1411: cat stderr
+input:
 stderr:
+  | (# + 1) = 1111
 input:
-  | 1//2
+./calc.at:1407:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -105953,135 +105871,80 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413:  $PREPARSER ./calc  input
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1411:  $PREPARSER ./calc  /dev/null
 stderr:
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1408: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1411: cat stderr
+  | (# + 1) = 1111
+./calc.at:1408:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1409: cat stderr
-./calc.at:1408: cat stderr
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | 
-  | +1
-./calc.at:1411:  $PREPARSER ./calc  input
-input:
-input:
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' (1.1: )
+Error: discarding token '+' (1.1: )
 Reading a token
 Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
+Error: discarding token "number" (1.1: 1)
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-Starting parse
-Entering state 0
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
+Entering state 19
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + #) = 1111
-./calc.at:1408:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 28
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
 Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
 Entering state 6
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-  | 
-  | +1
-./calc.at:1409:  $PREPARSER ./calc  input
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
 Starting parse
@@ -106091,108 +105954,75 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
+Next token is token '+' (1.1: )
+Error: discarding token '+' (1.1: )
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token "number" (1.1: 1)
+Error: discarding token "number" (1.1: 1)
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.1: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 119):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
 Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
 Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
+Shifting token "end of input" (1.1: )
 Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1411: "$PERL" -pi -e 'use strict;
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1409: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -106202,54 +106032,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-Starting parse
-Entering state 0
-Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
 Reading a token
 Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
+Error: discarding token '+' (1.4: )
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
 Reading a token
 Next token is token ')' (1.7: )
 Entering state 11
@@ -106300,86 +106103,15 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1411: cat stderr
-./calc.at:1413: cat stderr
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1411:  $PREPARSER ./calc  /dev/null
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
 Starting parse
 Entering state 0
 Reading a token
 Now at end of input.
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | error
 ./calc.at:1409: cat stderr
-./calc.at:1413:  $PREPARSER ./calc  input
-./calc.at:1408: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1408: cat stderr
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1409:  $PREPARSER ./calc  /dev/null
-input:
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-  | (# + 1) = 1111
-stderr:
-./calc.at:1408:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -106450,25 +106182,9 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1411: cat stderr
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
+input:
+  | (1 + #) = 1111
+./calc.at:1409:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -106477,18 +106193,26 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
 Reading a token
 Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
 Next token is token ')' (1.7: )
 Entering state 11
@@ -106539,11 +106263,8 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1411:  $PREPARSER ./calc  input
-./calc.at:1413: cat stderr
-./calc.at:1409: "$PERL" -pi -e 'use strict;
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1407: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -106553,7 +106274,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1408: "$PERL" -pi -e 'use strict;
+./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -106564,7 +106285,17 @@
   }eg
 ' expout || exit 77
 stderr:
-./calc.at:1409: cat stderr
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1411: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -106572,234 +106303,172 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
+Next token is token ')' (1.7: )
 Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
 Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
 Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
 Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 21
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1408: cat stderr
+./calc.at:1407: cat stderr
+input:
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+  | (1 + # + 1) = 1111
+./calc.at:1408:  $PREPARSER ./calc  input
+./calc.at:1411:  $PREPARSER ./calc  input
+input:
+./calc.at:1409: cat stderr
+  | (1 + # + 1) = 1111
+./calc.at:1407:  $PREPARSER ./calc  input
+stderr:
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 22
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 31
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
 Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
 Reading a token
-Next token is token ')' (1.42: )
+Next token is token ')' (1.11: )
 Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.47-2.0: )
+Next token is token '\n' (1.19-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -106812,13 +106481,6 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1408: cat stderr
-input:
-  | 1 = 2 = 3
-stderr:
-./calc.at:1413:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -107066,43 +106728,93 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-input:
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
+Reducing stack 0 by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Error: popping token '+' (1.1: )
+Error: popping nterm exp (1.1: 1)
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
+Reading a token
+Next token is token '+' (1.1: )
+Error: discarding token '+' (1.1: )
+Reading a token
+Next token is token "number" (1.1: 1)
+Error: discarding token "number" (1.1: 1)
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 28
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-  | (1 + # + 1) = 1111
-./calc.at:1408:  $PREPARSER ./calc  input
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1409:  $PREPARSER ./calc  input
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -107186,19 +106898,10 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+input:
+  | (# + 1) = 1111
+./calc.at:1409:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -107213,7 +106916,7 @@
 Next token is token ')' (1.2: )
 Shifting token ')' (1.2: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2: )
    $3 = token ')' (1.2: )
@@ -107249,7 +106952,7 @@
 Entering state 30
 Reading a token
 Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.7: 1)
    $2 = token '+' (1.9: )
    $3 = nterm exp (1.11: 1)
@@ -107268,7 +106971,7 @@
 Entering state 30
 Reading a token
 Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.7-11: 2)
    $2 = token '+' (1.13: )
    $3 = nterm exp (1.15: 1)
@@ -107287,7 +106990,7 @@
 Next token is token ')' (1.18: )
 Shifting token ')' (1.18: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.6: )
    $2 = token error (1.7-18: )
    $3 = token ')' (1.18: )
@@ -107295,7 +106998,7 @@
 Entering state 30
 Reading a token
 Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-2: 1111)
    $2 = token '+' (1.4: )
    $3 = nterm exp (1.6-18: 1111)
@@ -107327,7 +107030,7 @@
 Next token is token ')' (1.28: )
 Shifting token ')' (1.28: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.22: )
    $2 = token error (1.23-27: )
    $3 = token ')' (1.28: )
@@ -107335,7 +107038,7 @@
 Entering state 30
 Reading a token
 Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-18: 2222)
    $2 = token '+' (1.20: )
    $3 = nterm exp (1.22-28: 1111)
@@ -107370,7 +107073,7 @@
 Entering state 31
 Reading a token
 Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 107):
+Reducing stack 0 by rule 9 (line 101):
    $1 = nterm exp (1.33: 1)
    $2 = token '*' (1.35: )
    $3 = nterm exp (1.37: 2)
@@ -107394,7 +107097,7 @@
 Next token is token ')' (1.42: )
 Shifting token ')' (1.42: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.32: )
    $2 = token error (1.33-41: )
    $3 = token ')' (1.42: )
@@ -107402,7 +107105,7 @@
 Entering state 30
 Reading a token
 Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-28: 3333)
    $2 = token '+' (1.30: )
    $3 = nterm exp (1.32-42: 1111)
@@ -107446,37 +107149,101 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
 Next token is token "number" (1.1: 1)
 Shifting token "number" (1.1: 1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
+Reducing stack 0 by rule 5 (line 81):
    $1 = token "number" (1.1: 1)
 -> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error (1.1: )
+Error: popping token '+' (1.1: )
+Error: popping nterm exp (1.1: 1)
+Shifting token error (1.1: )
+Entering state 11
+Next token is token error (1.1: )
+Error: discarding token error (1.1: )
+Reading a token
+Next token is token '+' (1.1: )
+Error: discarding token '+' (1.1: )
+Reading a token
+Next token is token "number" (1.1: 1)
+Error: discarding token "number" (1.1: 1)
+Reading a token
+Next token is token ')' (1.1: )
+Entering state 11
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 119):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1: )
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
+Next token is token '=' (1.1: )
+Shifting token '=' (1.1: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token "number" (1.1: 1111)
+Shifting token "number" (1.1: 1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
 Entering state 28
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-stderr:
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 6 (line 82):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '=' (1.1: )
+   $3 = nterm exp (1.1: 1111)
+-> $$ = nterm exp (1.1: 1111)
+Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 1111)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
 Starting parse
 Entering state 0
 Reading a token
@@ -107484,70 +107251,56 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
 Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
 Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
 Reading a token
-Next token is token ')' (1.11: )
+Next token is token ')' (1.7: )
 Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 126):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -107560,6 +107313,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -107568,234 +107322,56 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
 Shifting token error (1.2: )
 Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
 Reading a token
 Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
-Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
+Error: discarding token '+' (1.4: )
 Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
 Reading a token
-Next token is token ')' (1.28: )
+Next token is token ')' (1.7: )
 Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 31
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 107):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 30
 Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
-Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.47-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -107808,7 +107384,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1408: "$PERL" -pi -e 'use strict;
+./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -107818,8 +107394,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1408: cat stderr
 ./calc.at:1411: cat stderr
-./calc.at:1413: "$PERL" -pi -e 'use strict;
+./calc.at:1409: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -107829,8 +107406,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1413: cat stderr
-./calc.at:1409: "$PERL" -pi -e 'use strict;
+./calc.at:1407: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -107840,14 +107416,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1408: cat stderr
-input:
-./calc.at:1409: cat stderr
-  | (!!) + (1 2) = 1
-./calc.at:1411:  $PREPARSER ./calc  input
-input:
 input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1408:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1409: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -107855,75 +107428,62 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
 Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Reducing stack 0 by rule 16 (line 116):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
 Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
 Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
 Entering state 12
 Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
-Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
-Reading a token
-Next token is token ')' (1.12: )
-Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
-Entering state 19
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
 Reading a token
 Next token is token "number" (1.16: 1)
 Shifting token "number" (1.16: 1)
@@ -107931,22 +107491,39 @@
 Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.16: 1)
 -> $$ = nterm exp (1.16: 1)
-Entering state 28
+Entering state 29
 Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
    $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 108):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -107959,37 +107536,17 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 
-  | +1
-  | (1 + 1) / (1 - 1)
+./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1407: cat stderr
+input:
 input:
-./calc.at:1413:  $PREPARSER ./calc  input
   | (!!) + (1 2) = 1
-./calc.at:1408:  $PREPARSER ./calc  input
+./calc.at:1411:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
 ./calc.at:1409:  $PREPARSER ./calc  input
 stderr:
 stderr:
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -108105,7 +107662,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -108120,7 +107677,7 @@
 Next token is token '!' (1.3: )
 Shifting token '!' (1.3: )
 Entering state 16
-Reducing stack 0 by rule 16 (line 128):
+Reducing stack 0 by rule 16 (line 116):
    $1 = token '!' (1.2: )
    $2 = token '!' (1.3: )
 Shifting token error (1.2-3: )
@@ -108129,7 +107686,7 @@
 Next token is token ')' (1.4: )
 Shifting token ')' (1.4: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-3: )
    $3 = token ')' (1.4: )
@@ -108165,7 +107722,7 @@
 Next token is token ')' (1.12: )
 Shifting token ')' (1.12: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.8: )
    $2 = token error (1.9-11: )
    $3 = token ')' (1.12: )
@@ -108173,7 +107730,7 @@
 Entering state 30
 Reading a token
 Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-4: 1111)
    $2 = token '+' (1.6: )
    $3 = nterm exp (1.8-12: 1111)
@@ -108217,9 +107774,95 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1407:  $PREPARSER ./calc  input
+stderr:
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -108332,6 +107975,358 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 30
+Reading a token
+Next token is token ')' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 2)
+Entering state 12
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 118):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.1: 2)
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.1: )
+Shifting token '/' (1.1: )
+Entering state 23
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.1: )
+Reducing stack 0 by rule 8 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '-' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 0)
+Entering state 12
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 118):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.1: 0)
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 0)
+Entering state 32
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 10 (line 101):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '/' (1.1: )
+   $3 = nterm exp (1.1: 0)
+error: null divisor
+-> $$ = nterm exp (1.1: 2)
+Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 126):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1408: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1408: cat stderr
+stderr:
+./calc.at:1411: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.1: )
+Shifting token '+' (1.1: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 30
+Reading a token
+Next token is token ')' (1.1: )
+Reducing stack 0 by rule 7 (line 98):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 2)
+Entering state 12
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 118):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.1: 2)
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.1: )
+Shifting token '/' (1.1: )
+Entering state 23
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.1: )
+Shifting token '-' (1.1: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 81):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.1: )
+Reducing stack 0 by rule 8 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '-' (1.1: )
+   $3 = nterm exp (1.1: 1)
+-> $$ = nterm exp (1.1: 0)
+Entering state 12
+Next token is token ')' (1.1: )
+Shifting token ')' (1.1: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 118):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.1: 0)
+   $3 = token ')' (1.1: )
+-> $$ = nterm exp (1.1: 0)
+Entering state 32
+Reading a token
+Next token is token '\n' (1.1: )
+Reducing stack 0 by rule 10 (line 101):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '/' (1.1: )
+   $3 = nterm exp (1.1: 0)
+error: null divisor
+-> $$ = nterm exp (1.1: 2)
+Entering state 8
+Next token is token '\n' (1.1: )
+Shifting token '\n' (1.1: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 77):
+   $1 = nterm exp (1.1: 2)
+   $2 = token '\n' (1.1: )
+-> $$ = nterm line (1.1: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 71):
+   $1 = nterm line (1.1: )
+-> $$ = nterm input (1.1: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (1.1: )
+Entering state 17
+Cleanup: popping token "end of input" (1.1: )
+Cleanup: popping nterm input (1.1: )
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+516. calc.at:1408:  ok
+./calc.at:1411: cat stderr
+./calc.at:1409: cat stderr
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1409:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -108448,6 +108443,32 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+
+./calc.at:1407: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1413: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
 Starting parse
 Entering state 0
 Reading a token
@@ -108455,75 +108476,62 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
 Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Reducing stack 0 by rule 16 (line 128):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
 Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 105):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
 Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
 Entering state 12
 Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
-Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
-Reading a token
-Next token is token ')' (1.12: )
-Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
-Entering state 19
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
 Reading a token
 Next token is token "number" (1.16: 1)
 Shifting token "number" (1.16: 1)
@@ -108531,22 +108539,39 @@
 Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.16: 1)
 -> $$ = nterm exp (1.16: 1)
-Entering state 28
+Entering state 29
 Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 106):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
    $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 125):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 108):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -108559,90 +108584,14 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1408: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1411: cat stderr
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1408: cat stderr
-./calc.at:1409: cat stderr
-516. calc.at:1408:  ok
-./calc.at:1413: cat stderr
-input:
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1411:  $PREPARSER ./calc  input
-  | (- *) + (1 2) = 1
-./calc.at:1413:  $PREPARSER ./calc  /dev/null
-./calc.at:1409:  $PREPARSER ./calc  input
-stderr:
-stderr:
+input:
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-Starting parse
-Entering state 0
-Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
@@ -108757,7 +108706,35 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1413:  $PREPARSER ./calc  input
+./calc.at:1409: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1407: cat stderr
+515. calc.at:1407:  ok
+stderr:
+stderr:
+./calc.at:1409: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -108773,7 +108750,7 @@
 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 Shifting token error (1.4: )
 Entering state 9
-Reducing stack 0 by rule 15 (line 127):
+Reducing stack 0 by rule 15 (line 115):
    $1 = token '-' (1.2: )
    $2 = token error (1.4: )
 Shifting token error (1.2-4: )
@@ -108786,7 +108763,7 @@
 Next token is token ')' (1.5: )
 Shifting token ')' (1.5: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-4: )
    $3 = token ')' (1.5: )
@@ -108822,7 +108799,7 @@
 Next token is token ')' (1.13: )
 Shifting token ')' (1.13: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.9: )
    $2 = token error (1.10-12: )
    $3 = token ')' (1.13: )
@@ -108830,7 +108807,7 @@
 Entering state 30
 Reading a token
 Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-5: 1111)
    $2 = token '+' (1.7: )
    $3 = nterm exp (1.9-13: 1111)
@@ -108874,114 +108851,80 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 115):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
 Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
 Reading a token
-Next token is token '=' (1.15: )
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '=' (1.11: )
 Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
+Next token is token "number" (1.13: 7)
+Shifting token "number" (1.13: 7)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token "number" (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (1.14-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -108989,134 +108932,764 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-Starting parse
-Entering state 0
+Next token is token "number" (2.1: 1)
+Shifting token "number" (2.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
+Entering state 8
 Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-Starting parse
-Entering state 0
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
+Entering state 21
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token "number" (2.5: 2)
+Shifting token "number" (2.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 22
+Reading a token
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Reading a token
+Next token is token "number" (2.10: 3)
+Shifting token "number" (2.10: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Reading a token
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 31
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
+Entering state 30
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
+Entering state 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 19
+Reading a token
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Reading a token
+Next token is token "number" (2.15: 5)
+Shifting token "number" (2.15: 5)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Reading a token
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 28
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
+Entering state 8
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Reading a token
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Reading a token
+Next token is token "number" (4.2: 1)
+Shifting token "number" (4.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Reading a token
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 24
+Reading a token
+Next token is token "number" (4.4: 2)
+Shifting token "number" (4.4: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 33
+Reading a token
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
+Entering state 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
+Entering state 19
+Reading a token
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Reading a token
+Next token is token "number" (4.9: 1)
+Shifting token "number" (4.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Reading a token
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 28
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
+Entering state 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
+Entering state 6
+Reading a token
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
 Entering state 4
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
 Entering state 2
 Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 127):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Next token is token "number" (5.3: 1)
+Shifting token "number" (5.3: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
 Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token ')' (5.4: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
+Entering state 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 24
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
+Next token is token "number" (5.6: 2)
+Shifting token "number" (5.6: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 33
+Reading a token
+Next token is token '=' (5.8: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
+Entering state 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
+Entering state 19
+Reading a token
+Next token is token "number" (5.10: 1)
+Shifting token "number" (5.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (5.11-6.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
+Entering state 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
+Reading a token
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
+Reading a token
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Reading a token
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Reading a token
+Next token is token "number" (7.4: 1)
+Shifting token "number" (7.4: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Reading a token
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
+Entering state 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 19
+Reading a token
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
+Reading a token
+Next token is token "number" (7.9: 1)
+Shifting token "number" (7.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 28
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (9.1: 1)
+Shifting token "number" (9.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
+Entering state 8
+Reading a token
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (9.5: 2)
+Shifting token "number" (9.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 29
+Reading a token
+Next token is token '-' (9.7: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 20
+Reading a token
+Next token is token "number" (9.9: 3)
+Shifting token "number" (9.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 29
+Reading a token
+Next token is token '=' (9.11: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 19
+Reading a token
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
+Reading a token
+Next token is token "number" (9.14: 4)
+Shifting token "number" (9.14: 4)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
+Reading a token
+Next token is token '\n' (9.15-10.0: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 28
+Next token is token '\n' (9.15-10.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
+Entering state 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (10.1: 1)
+Shifting token "number" (10.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
+Reading a token
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 20
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
+Next token is token "number" (10.6: 2)
+Shifting token "number" (10.6: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
+   $1 = token "number" (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
 Entering state 12
 Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
+Entering state 20
 Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
+Next token is token "number" (10.10: 3)
+Shifting token "number" (10.10: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
+Entering state 29
 Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+Next token is token ')' (10.11: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
+Entering state 12
+Next token is token ')' (10.11: )
+Shifting token ')' (10.11: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 29
+Reading a token
+Next token is token '=' (10.13: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
 Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
+Next token is token "number" (10.15: 2)
+Shifting token "number" (10.15: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token "number" (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (10.16-11.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (12.1: 2)
+Shifting token "number" (12.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
+Entering state 8
+Reading a token
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 24
+Reading a token
+Next token is token "number" (12.3: 2)
+Shifting token "number" (12.3: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 33
+Reading a token
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 24
+Reading a token
+Next token is token "number" (12.5: 3)
+Shifting token "number" (12.5: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 33
+Reading a token
+Next token is token '=' (12.7: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 33
+Next token is token '=' (12.7: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
+Entering state 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 19
+Reading a token
+Next token is token "number" (12.9-11: 256)
+Shifting token "number" (12.9-11: 256)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 28
+Reading a token
+Next token is token '\n' (12.12-13.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
+Entering state 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
+Reading a token
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (13.2: 2)
+Shifting token "number" (13.2: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
+Entering state 12
+Reading a token
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 24
+Reading a token
+Next token is token "number" (13.4: 2)
+Shifting token "number" (13.4: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 33
+Reading a token
+Next token is token ')' (13.5: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
+Entering state 12
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
+Entering state 8
+Reading a token
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 24
+Reading a token
+Next token is token "number" (13.7: 3)
+Shifting token "number" (13.7: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 33
+Reading a token
+Next token is token '=' (13.9: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
+Entering state 8
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (13.11-12: 64)
+Shifting token "number" (13.11-12: 64)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
+Entering state 28
+Reading a token
+Next token is token '\n' (13.13-14.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
+Entering state 8
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
+Shifting token "end of input" (14.1: )
 Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Cleanup: popping token "end of input" (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
+517. calc.at:1409:  ok
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -109127,277 +109700,889 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1409: cat stderr
-./calc.at:1411: cat stderr
-input:
-./calc.at:1413: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1409:  $PREPARSER ./calc  input
-  | (* *) + (*) + (*)
-./calc.at:1411:  $PREPARSER ./calc  input
-stderr:
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
+Next token is token "number" (1.13: 7)
+Shifting token "number" (1.13: 7)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.14-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
+Entering state 8
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (2.1: 1)
+Shifting token "number" (2.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
 Entering state 21
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
+Next token is token "number" (2.5: 2)
+Shifting token "number" (2.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
+Entering state 30
 Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 22
 Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Reading a token
+Next token is token "number" (2.10: 3)
+Shifting token "number" (2.10: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Reading a token
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 31
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
 Entering state 30
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
+Entering state 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Reading a token
+Next token is token "number" (2.15: 5)
+Shifting token "number" (2.15: 5)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Reading a token
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 28
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
 Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Reading a token
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Reading a token
+Next token is token "number" (4.2: 1)
+Shifting token "number" (4.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Reading a token
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 24
+Reading a token
+Next token is token "number" (4.4: 2)
+Shifting token "number" (4.4: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 33
+Reading a token
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
+Entering state 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
+Entering state 19
+Reading a token
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Reading a token
+Next token is token "number" (4.9: 1)
+Shifting token "number" (4.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Reading a token
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 28
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
+Entering state 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
+Entering state 6
+Reading a token
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
 Entering state 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
 Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 126):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
+Next token is token "number" (5.3: 1)
+Shifting token "number" (5.3: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token ')' (5.4: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
+Entering state 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Reading a token
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 24
+Reading a token
+Next token is token "number" (5.6: 2)
+Shifting token "number" (5.6: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 33
+Reading a token
+Next token is token '=' (5.8: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
+Entering state 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
+Entering state 19
+Reading a token
+Next token is token "number" (5.10: 1)
+Shifting token "number" (5.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (5.11-6.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
+Entering state 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
 Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token "number" (7.4: 1)
+Shifting token "number" (7.4: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Reading a token
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
 Entering state 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 19
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
+Next token is token "number" (7.9: 1)
+Shifting token "number" (7.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 28
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (9.1: 1)
+Shifting token "number" (9.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
+Entering state 8
+Reading a token
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (9.5: 2)
+Shifting token "number" (9.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 29
+Reading a token
+Next token is token '-' (9.7: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 20
+Reading a token
+Next token is token "number" (9.9: 3)
+Shifting token "number" (9.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 29
+Reading a token
+Next token is token '=' (9.11: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 19
+Reading a token
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
+Reading a token
+Next token is token "number" (9.14: 4)
+Shifting token "number" (9.14: 4)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
+Reading a token
+Next token is token '\n' (9.15-10.0: )
+Reducing stack 0 by rule 11 (line 111):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 28
+Next token is token '\n' (9.15-10.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
+Entering state 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (10.1: 1)
+Shifting token "number" (10.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
+Reading a token
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 20
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
 Entering state 4
 Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
+Next token is token "number" (10.6: 2)
+Shifting token "number" (10.6: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
+Entering state 12
 Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
+Entering state 20
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+Next token is token "number" (10.10: 3)
+Shifting token "number" (10.10: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
+Entering state 29
+Reading a token
+Next token is token ')' (10.11: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
+Entering state 12
+Next token is token ')' (10.11: )
+Shifting token ')' (10.11: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 29
+Reading a token
+Next token is token '=' (10.13: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
 Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
+Entering state 19
 Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
+Next token is token "number" (10.15: 2)
+Shifting token "number" (10.15: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
+Entering state 28
+Reading a token
+Next token is token '\n' (10.16-11.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
+Entering state 8
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (12.1: 2)
+Shifting token "number" (12.1: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
+Entering state 8
+Reading a token
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 24
+Reading a token
+Next token is token "number" (12.3: 2)
+Shifting token "number" (12.3: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 33
+Reading a token
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 24
+Reading a token
+Next token is token "number" (12.5: 3)
+Shifting token "number" (12.5: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 33
+Reading a token
+Next token is token '=' (12.7: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 33
+Next token is token '=' (12.7: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
+Entering state 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 19
+Reading a token
+Next token is token "number" (12.9-11: 256)
+Shifting token "number" (12.9-11: 256)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 28
+Reading a token
+Next token is token '\n' (12.12-13.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
+Entering state 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
+Reading a token
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
 Entering state 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
+Next token is token "number" (13.2: 2)
+Shifting token "number" (13.2: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
+Entering state 12
 Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 24
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token "number" (13.4: 2)
+Shifting token "number" (13.4: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 33
+Reading a token
+Next token is token ')' (13.5: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
+Entering state 12
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Reading a token
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 24
+Reading a token
+Next token is token "number" (13.7: 3)
+Shifting token "number" (13.7: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 33
+Reading a token
+Next token is token '=' (13.9: )
+Reducing stack 0 by rule 12 (line 112):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
+Entering state 8
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (13.11-12: 64)
+Shifting token "number" (13.11-12: 64)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
+Entering state 28
+Reading a token
+Next token is token '\n' (13.13-14.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
+Entering state 8
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 18
+Reducing stack 0 by rule 2 (line 79):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
+Shifting token "end of input" (14.1: )
 Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-524. calc.at:1432: testing Calculator C++ %locations  ...
-./calc.at:1432: mv calc.y.tmp calc.y
+Cleanup: popping token "end of input" (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
+523. calc.at:1431: testing Calculator C++   ...
+./calc.at:1431: mv calc.y.tmp calc.y
 
+./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1411: cat stderr
+  | 1 2
+./calc.at:1413:  $PREPARSER ./calc  input
+
+stderr:
+input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1411:  $PREPARSER ./calc  input
+
 stderr:
 stderr:
-./calc.at:1413:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
 Starting parse
 Entering state 0
 Reading a token
@@ -109420,7 +110605,7 @@
 Next token is token ')' (1.5: )
 Shifting token ')' (1.5: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-4: )
    $3 = token ')' (1.5: )
@@ -109447,7 +110632,7 @@
 Next token is token ')' (1.11: )
 Shifting token ')' (1.11: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.9: )
    $2 = token error (1.10: )
    $3 = token ')' (1.11: )
@@ -109455,7 +110640,7 @@
 Entering state 30
 Reading a token
 Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-5: 1111)
    $2 = token '+' (1.7: )
    $3 = nterm exp (1.9-11: 1111)
@@ -109481,7 +110666,7 @@
 Next token is token ')' (1.17: )
 Shifting token ')' (1.17: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.15: )
    $2 = token error (1.16: )
    $3 = token ')' (1.17: )
@@ -109489,7 +110674,7 @@
 Entering state 30
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-11: 2222)
    $2 = token '+' (1.13: )
    $3 = nterm exp (1.15-17: 1111)
@@ -109513,7 +110698,18 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -109629,8 +110825,11 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-./calc.at:1409: "$PERL" -pi -e 'use strict;
+./calc.at:1413: cat stderr
+524. calc.at:1432: testing Calculator C++ %locations  ...
+./calc.at:1432: mv calc.y.tmp calc.y
+
+./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -109640,852 +110839,192 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
+./calc.at:1411: cat stderr
+525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ...
+  | 1//2
+./calc.at:1431: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1413:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1433: mv calc.y.tmp calc.y
+
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
 Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1411:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
 Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 30
 Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
 Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 30
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
 Reading a token
-Next token is token '+' (1.20: )
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
 Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
 Entering state 21
 Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 117):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 21
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
 Entering state 22
 Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
 Entering state 31
 Reading a token
-Next token is token '*' (1.39: )
+Next token is token '+' (1.11: )
 Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
-Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
-Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: cat stderr
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-./calc.at:1411: cat stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
-Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
-Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 31
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
-Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
-Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1409:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 129):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1411:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 129):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-input:
-stderr:
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1 + 2 * 3 + !- ++
-./calc.at:1409:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 117):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
 Next token is token '!' (1.13: )
 Shifting token '!' (1.13: )
@@ -110499,79 +111038,11 @@
    $2 = token '+' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1413: cat stderr
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 130):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1413: cat stderr
   | 1 + 2 * 3 + !- ++
 ./calc.at:1411:  $PREPARSER ./calc  input
 stderr:
-input:
 Starting parse
 Entering state 0
 Reading a token
@@ -110637,22 +111108,8 @@
    $2 = token '-' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-  | (!!) + (1 2) = 1
 ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1413:  $PREPARSER ./calc  input
-stderr:
 stderr:
-./calc.at:1432: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -110718,118 +111175,10 @@
    $2 = token '-' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Reducing stack 0 by rule 16 (line 116):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
-Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
-Reading a token
-Next token is token ')' (1.12: )
-Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | error
+./calc.at:1413:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -110840,202 +111189,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1409: cat stderr
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Reducing stack 0 by rule 16 (line 116):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
-Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
-Reading a token
-Next token is token ')' (1.12: )
-Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1432: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 ./calc.at:1411: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1409:  $PREPARSER ./calc  input
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1433: $CXX $CPPFLAGS  $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 131):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
 input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1411:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -111073,14 +111246,14 @@
 Entering state 31
 Reading a token
 Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 107):
+Reducing stack 0 by rule 9 (line 101):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
    $3 = nterm exp (1.9: 3)
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 30
 Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
    $3 = nterm exp (1.5-9: 6)
@@ -111097,15 +111270,13 @@
 Next token is token '*' (1.14: )
 Shifting token '*' (1.14: )
 Entering state 15
-Reducing stack 0 by rule 19 (line 131):
+Reducing stack 0 by rule 19 (line 119):
    $1 = token '!' (1.13: )
    $2 = token '*' (1.14: )
 1.14: memory exhausted
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-  | 1 + 2 * 3 + !* ++
-./calc.at:1411:  $PREPARSER ./calc  input
-./calc.at:1413: cat stderr
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -111173,8 +111344,18 @@
 1.14: memory exhausted
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1409: "$PERL" -pi -e 'use strict;
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1413: cat stderr
+./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -111185,10 +111366,10 @@
   }eg
 ' expout || exit 77
 input:
-  | (- *) + (1 2) = 1
+./calc.at:1411: cat stderr
+  | 1 = 2 = 3
 ./calc.at:1413:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1409: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -111200,9 +111381,9 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
 Reading a token
 Next token is token "number" (1.5: 2)
 Shifting token "number" (1.5: 2)
@@ -111210,300 +111391,49 @@
 Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 119):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 115):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
-Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
 input:
-stderr:
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (#) + (#) = 2222
-./calc.at:1409:  $PREPARSER ./calc  input
+./calc.at:1411:  $PREPARSER ./calc  input
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 115):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
-Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
 Starting parse
 Entering state 0
 Reading a token
@@ -111523,7 +111453,7 @@
 Next token is token ')' (1.3: )
 Shifting token ')' (1.3: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2: )
    $3 = token ')' (1.3: )
@@ -111550,7 +111480,7 @@
 Next token is token ')' (1.9: )
 Shifting token ')' (1.9: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.7: )
    $2 = token error (1.8: )
    $3 = token ')' (1.9: )
@@ -111558,7 +111488,7 @@
 Entering state 30
 Reading a token
 Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-3: 1111)
    $2 = token '+' (1.5: )
    $3 = nterm exp (1.7-9: 1111)
@@ -111601,20 +111531,8 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1411: cat stderr
-./calc.at:1413: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -111634,7 +111552,7 @@
 Next token is token ')' (1.3: )
 Shifting token ')' (1.3: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2: )
    $3 = token ')' (1.3: )
@@ -111661,7 +111579,7 @@
 Next token is token ')' (1.9: )
 Shifting token ')' (1.9: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.7: )
    $2 = token error (1.8: )
    $3 = token ')' (1.9: )
@@ -111669,7 +111587,7 @@
 Entering state 30
 Reading a token
 Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.1-3: 1111)
    $2 = token '+' (1.5: )
    $3 = nterm exp (1.7-9: 1111)
@@ -111712,9 +111630,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-input:
-./calc.at:1409: "$PERL" -pi -e 'use strict;
+./calc.at:1413: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -111724,314 +111640,32 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (* *) + (*) + (*)
+./calc.at:1411: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1413: cat stderr
+./calc.at:1411: cat stderr
+input:
+  | 
+  | +1
 ./calc.at:1413:  $PREPARSER ./calc  input
-  | (#) + (#) = 2222
-./calc.at:1411:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1409: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 input:
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -112039,13 +111673,13 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (1 + #) = 1111
-./calc.at:1409:  $PREPARSER ./calc  input
+./calc.at:1411:  $PREPARSER ./calc  input
 stderr:
 stderr:
 Starting parse
@@ -112081,7 +111715,7 @@
 Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-6: )
    $3 = token ')' (1.7: )
@@ -112125,120 +111759,14 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -112246,13 +111774,12 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1411: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -112286,7 +111813,7 @@
 Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-6: )
    $3 = token ')' (1.7: )
@@ -112330,7 +111857,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: "$PERL" -pi -e 'use strict;
+./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -112340,8 +111867,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-./calc.at:1409: "$PERL" -pi -e 'use strict;
+./calc.at:1413: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -112351,172 +111877,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (1 + #) = 1111
-./calc.at:1411:  $PREPARSER ./calc  input
-./calc.at:1413: cat stderr
-./calc.at:1409: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1411: cat stderr
 input:
+./calc.at:1413: cat stderr
   | (# + 1) = 1111
-./calc.at:1409:  $PREPARSER ./calc  input
-input:
-stderr:
+./calc.at:1411:  $PREPARSER ./calc  input
+./calc.at:1413:  $PREPARSER ./calc  /dev/null
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -112542,7 +111916,7 @@
 Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-6: )
    $3 = token ')' (1.7: )
@@ -112586,143 +111960,16 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1413:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 117):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 117):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -112748,7 +111995,7 @@
 Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-6: )
    $3 = token ')' (1.7: )
@@ -112802,12 +112049,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1413:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1411: cat stderr
-./calc.at:1409: "$PERL" -pi -e 'use strict;
+./calc.at:1413: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -112817,298 +112059,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 118):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1409: cat stderr
+./calc.at:1411: cat stderr
+./calc.at:1413: cat stderr
 input:
-stderr:
-  | (# + 1) = 1111
+  | (1 + # + 1) = 1111
 ./calc.at:1411:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 118):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | (1 + # + 1) = 1111
-./calc.at:1409:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1413: cat stderr
+./calc.at:1413:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -113148,7 +112107,7 @@
 Next token is token ')' (1.11: )
 Shifting token ')' (1.11: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
    $2 = token error (1.2-10: )
    $3 = token ')' (1.11: )
@@ -113192,19 +112151,8 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1411: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1411: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -113212,238 +112160,234 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
-Reading a token
-Next token is token ')' (1.11: )
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
 Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
 Entering state 26
-Reducing stack 0 by rule 14 (line 126):
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1413:  $PREPARSER ./calc  input
-stderr:
-input:
-Starting parse
-Entering state 0
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
 Entering state 30
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
 Entering state 30
-Next token is token '+' (1.11: )
+Reading a token
+Next token is token '+' (1.20: )
 Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
 Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
 Entering state 21
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
 Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 119):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | (1 + # + 1) = 1111
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1411:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
 Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
 Reading a token
-Next token is token ')' (1.11: )
+Next token is token ')' (1.42: )
 Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (1.47-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
 Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -113457,76 +112401,6 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1409: cat stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 119):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
-input:
 Starting parse
 Entering state 0
 Reading a token
@@ -113610,18 +112484,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-  | (1 + 1) / (1 - 1)
-./calc.at:1409:  $PREPARSER ./calc  input
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -113630,102 +112493,234 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
 Entering state 30
 Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 105):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
 Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 125):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
 Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 21
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
 Entering state 12
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
 Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 106):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
 Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 125):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 108):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -113738,8 +112733,6 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1413: cat stderr
 ./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -113750,6 +112743,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1411: cat stderr
+./calc.at:1413: cat stderr
+input:
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1411:  $PREPARSER ./calc  input
+  | (!!) + (1 2) = 1
+./calc.at:1413:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -113779,7 +112791,7 @@
 Entering state 30
 Reading a token
 Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 105):
+Reducing stack 0 by rule 7 (line 99):
    $1 = nterm exp (1.2: 1)
    $2 = token '+' (1.4: )
    $3 = nterm exp (1.6: 1)
@@ -113788,7 +112800,7 @@
 Next token is token ')' (1.7: )
 Shifting token ')' (1.7: )
 Entering state 27
-Reducing stack 0 by rule 13 (line 125):
+Reducing stack 0 by rule 13 (line 113):
    $1 = token '(' (1.1: )
    $2 = nterm exp (1.2-6: 2)
    $3 = token ')' (1.7: )
@@ -113824,7 +112836,7 @@
 Entering state 29
 Reading a token
 Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 106):
+Reducing stack 0 by rule 8 (line 100):
    $1 = nterm exp (1.12: 1)
    $2 = token '-' (1.14: )
    $3 = nterm exp (1.16: 1)
@@ -113833,7 +112845,7 @@
 Next token is token ')' (1.17: )
 Shifting token ')' (1.17: )
 Entering state 27
-Reducing stack 0 by rule 13 (line 125):
+Reducing stack 0 by rule 13 (line 113):
    $1 = token '(' (1.11: )
    $2 = nterm exp (1.12-16: 0)
    $3 = token ')' (1.17: )
@@ -113841,7 +112853,7 @@
 Entering state 32
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 108):
+Reducing stack 0 by rule 10 (line 102):
    $1 = nterm exp (1.1-7: 2)
    $2 = token '/' (1.9: )
    $3 = nterm exp (1.11-17: 0)
@@ -113866,11 +112878,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-./calc.at:1411: cat stderr
-  | (#) + (#) = 2222
-./calc.at:1413:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -113878,188 +112886,90 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
 Reading a token
-Next token is token ')' (1.3: )
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Reducing stack 0 by rule 16 (line 116):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
 Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
 Entering state 21
 Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
 Entering state 4
 Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1409: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-stderr:
-  | (1 + 1) / (1 - 1)
-./calc.at:1411:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
 Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
 Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
 Reading a token
-Next token is token ')' (1.9: )
+Next token is token ')' (1.12: )
 Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
 Entering state 30
 Reading a token
-Next token is token '=' (1.11: )
+Next token is token '=' (1.14: )
 Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
 Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
 Entering state 28
 Reading a token
 Next token is token '\n' (1.17-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
 -> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Next token is token '\n' (1.17-2.0: )
@@ -114081,7 +112991,7 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-./calc.at:1409: cat stderr
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -114197,19 +113107,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-517. calc.at:1409:  ok
 Starting parse
 Entering state 0
 Reading a token
@@ -114217,62 +113115,75 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
 Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Reducing stack 0 by rule 16 (line 116):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
+Entering state 11
 Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
 Entering state 12
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
+Entering state 11
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
+Reading a token
+Next token is token ')' (1.12: )
+Entering state 11
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 19
 Reading a token
 Next token is token "number" (1.16: 1)
 Shifting token "number" (1.16: 1)
@@ -114280,39 +113191,22 @@
 Reducing stack 0 by rule 5 (line 88):
    $1 = token "number" (1.16: 1)
 -> $$ = nterm exp (1.16: 1)
-Entering state 29
+Entering state 28
 Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
    $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 102):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -114325,7 +113219,6 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: cat stderr
 ./calc.at:1411: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -114336,12 +113229,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1413: cat stderr
+./calc.at:1411: cat stderr
 input:
-
-  | (1 + #) = 1111
+  | (- *) + (1 2) = 1
 ./calc.at:1413:  $PREPARSER ./calc  input
+518. calc.at:1411:  ok
 stderr:
-./calc.at:1411: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -114349,64 +113253,103 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 115):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
 Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.13: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 30
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -114420,7 +113363,6 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-518. calc.at:1411:  ok
 stderr:
 Starting parse
 Entering state 0
@@ -114429,64 +113371,103 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 115):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
 Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.13: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 30
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 19
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 28
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -114511,12 +113492,8 @@
   }eg
 ' expout || exit 77
 ./calc.at:1413: cat stderr
-525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ...
-./calc.at:1433: mv calc.y.tmp calc.y
-
 input:
-./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-  | (# + 1) = 1111
+  | (* *) + (*) + (*)
 ./calc.at:1413:  $PREPARSER ./calc  input
 stderr:
 Starting parse
@@ -114526,127 +113503,102 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 Shifting token error (1.2: )
 Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
 Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.5: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
 Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.11: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -114659,24 +113611,10 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span}  ...
-./calc.at:1413: cat stderr
 ./calc.at:1434: mv calc.y.tmp calc.y
 
-./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1413:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -114685,117 +113623,42 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
 Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
 Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' (1.11: )
+Next token is token ')' (1.5: )
 Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
 Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
 Reading a token
 Next token is token ')' (1.11: )
 Entering state 11
@@ -114803,37 +113666,59 @@
 Shifting token ')' (1.11: )
 Entering state 26
 Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
    $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
 Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
 Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 25
 Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 78):
@@ -114846,6 +113731,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 ./calc.at:1413: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -114856,258 +113742,308 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1433: $CXX $CPPFLAGS  $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 ./calc.at:1413: cat stderr
 input:
-  | (1 + 1) / (1 - 1)
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1413:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 30
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
 Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 117):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1434: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 102):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
-./calc.at:1414: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 117):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1413:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 30
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
 Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 118):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
 Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 102):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 118):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stdout:
+./calc.at:1413: cat stderr
+./calc.at:1414: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
+input:
+  | 1 + 2 * 3 + !* ++
 input:
+./calc.at:1413:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -115122,18 +114058,6 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1414:  $PREPARSER ./calc  input
-./calc.at:1413: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1434: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-./calc.at:1413: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -115170,6 +114094,141 @@
 -> $$ = nterm exp (1.9: 3)
 Entering state 31
 Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 119):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 119):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
 Next token is token '=' (1.11: )
 Reducing stack 0 by rule 9 (line 101):
    $1 = nterm exp (1.5: 2)
@@ -115971,8 +115030,17 @@
 Entering state 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-519. calc.at:1413:  ok
 ./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -116810,8 +115878,8 @@
 Entering state 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
+./calc.at:1413: cat stderr
 input:
-
   | 1 2
 ./calc.at:1414:  $PREPARSER ./calc  input
 stderr:
@@ -116831,1963 +115899,9 @@
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
 ./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | 1//2
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 23
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose  ...
-./calc.at:1435: mv calc.y.tmp calc.y
-
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1414: cat stderr
-input:
-  | error
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-./calc.at:1435: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-input:
-  | 1 = 2 = 3
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 28
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-./calc.at:1414:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
-Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
-Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 31
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
-Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
-Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 30
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 21
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
-Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
-Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 31
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 22
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
-Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
-Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Reducing stack 0 by rule 16 (line 116):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
-Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
-Reading a token
-Next token is token ')' (1.12: )
-Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 16
-Reducing stack 0 by rule 16 (line 116):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
-Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
-Reading a token
-Next token is token ')' (1.12: )
-Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 115):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
-Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 115):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
-Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 30
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 30
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 30
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 117):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 117):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 118):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 118):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | 1 + 2 * 3 + !* ++
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 119):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 22
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 31
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 101):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 30
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 21
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '*' (1.14: )
-Shifting token '*' (1.14: )
-Entering state 15
-Reducing stack 0 by rule 19 (line 119):
-   $1 = token '!' (1.13: )
-   $2 = token '*' (1.14: )
-1.14: memory exhausted
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
 input:
   | (#) + (#) = 2222
-./calc.at:1414:  $PREPARSER ./calc  input
+./calc.at:1413:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -118886,8 +116000,24 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
 Starting parse
 Entering state 0
 Reading a token
@@ -118985,6 +116115,16 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1414: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -118995,90 +116135,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+./calc.at:1413: cat stderr
+stdout:
 ./calc.at:1414: cat stderr
+./calc.at:1416: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.c calc.h
+
 input:
   | (1 + #) = 1111
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1413:  $PREPARSER ./calc  input
 stderr:
+input:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -119156,623 +116233,8 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 28
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 89):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
+  | 1//2
 ./calc.at:1414:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 102):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 30
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 23
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 29
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 100):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 27
-Reducing stack 0 by rule 13 (line 113):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 32
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 102):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 25
-Reducing stack 0 by rule 4 (line 84):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 17
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1414: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1414: cat stderr
-520. calc.at:1414:  ok
-
-528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose  ...
-./calc.at:1437: mv calc.y.tmp calc.y
-
-./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-stderr:
-stdout:
-./calc.at:1437: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-./calc.at:1416: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.c calc.h
-
-input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -119787,6 +116249,8 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1416:  $PREPARSER ./calc  input
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -120624,7 +117088,107 @@
 Entering state 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -121462,9 +118026,40 @@
 Entering state 17
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 23
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | 1 2
 ./calc.at:1416:  $PREPARSER ./calc  input
+./calc.at:1413: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -121482,7 +118077,20 @@
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
 stderr:
+  | (# + 1) = 1111
+./calc.at:1413:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -121498,6 +118106,77 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: cat stderr
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -121508,8 +118187,89 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1416: cat stderr
 input:
+stderr:
+  | error
+./calc.at:1414:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
   | 1//2
 ./calc.at:1416:  $PREPARSER ./calc  input
 stderr:
@@ -121533,7 +118293,26 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1413: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
 stderr:
 Starting parse
 Entering state 0
@@ -121555,7 +118334,8 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1416: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1414: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -121565,25 +118345,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1416: cat stderr
-input:
-  | error
-./calc.at:1416:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -121594,11 +118355,186 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | (1 + # + 1) = 1111
+./calc.at:1413:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1414: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1416: cat stderr
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
   | 1 = 2 = 3
-./calc.at:1416:  $PREPARSER ./calc  input
+./calc.at:1414:  $PREPARSER ./calc  input
+input:
 stderr:
+  | error
+./calc.at:1416:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -121628,7 +118564,8 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -121659,7 +118596,7 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1416: "$PERL" -pi -e 'use strict;
+./calc.at:1413: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -121669,53 +118606,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1416: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1416:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1413: cat stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 83):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 78):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1416: "$PERL" -pi -e 'use strict;
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+./calc.at:1414: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -121725,23 +118631,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1416: cat stderr
-./calc.at:1416:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -121752,10 +118641,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1416: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1416:  $PREPARSER ./calc  input
+  | (1 + 1) / (1 - 1)
+./calc.at:1413:  $PREPARSER ./calc  input
+./calc.at:1416: cat stderr
+./calc.at:1414: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -121764,59 +118654,1292 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 26
-Reducing stack 0 by rule 14 (line 114):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 21
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 21
 Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 88):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
 Entering state 30
 Reading a token
-Next token is token '+' (1.13: )
+Next token is token ')' (1.7: )
 Reducing stack 0 by rule 7 (line 99):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
 Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 21
-Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 102):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+input:
+./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 
+  | +1
+./calc.at:1414:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
+./calc.at:1416:  $PREPARSER ./calc  input
+stderr:
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 102):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1413: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 28
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+./calc.at:1413: cat stderr
+./calc.at:1416: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+519. calc.at:1413:  ok
+./calc.at:1416: cat stderr
+./calc.at:1414: cat stderr
+./calc.at:1414:  $PREPARSER ./calc  /dev/null
+input:
+stderr:
+  | 
+  | +1
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1416:  $PREPARSER ./calc  input
+stderr:
+
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 83):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1416: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1414: cat stderr
+./calc.at:1416: cat stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1414:  $PREPARSER ./calc  input
+./calc.at:1416:  $PREPARSER ./calc  /dev/null
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 21
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 31
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 22
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose  ...
+./calc.at:1435: mv calc.y.tmp calc.y
+
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1416: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1414: cat stderr
+./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1414:  $PREPARSER ./calc  input
+./calc.at:1416: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Reading a token
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Reducing stack 0 by rule 16 (line 116):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
+Entering state 11
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
+Entering state 11
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
+Reading a token
+Next token is token ')' (1.12: )
+Entering state 11
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
+./calc.at:1416:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Reading a token
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 16
+Reducing stack 0 by rule 16 (line 116):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
+Entering state 11
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
+Entering state 11
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
+Reading a token
+Next token is token ')' (1.12: )
+Entering state 11
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
 Next token is token "number" (1.15: 1)
 Shifting token "number" (1.15: 1)
 Entering state 1
@@ -122006,6 +120129,16 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -122253,6 +120386,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: cat stderr
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -122263,11 +120397,250 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1414:  $PREPARSER ./calc  input
 ./calc.at:1416: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 115):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
+Entering state 11
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
+Reading a token
+Next token is token ')' (1.13: )
+Entering state 11
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1435: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1416:  $PREPARSER ./calc  input
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 115):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
+Entering state 11
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
+Reading a token
+Next token is token ')' (1.13: )
+Entering state 11
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 Starting parse
 Entering state 0
 Reading a token
@@ -122380,6 +120753,16 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -122492,6 +120875,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: cat stderr
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -122502,10 +120886,246 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (* *) + (*) + (*)
+./calc.at:1414:  $PREPARSER ./calc  input
 ./calc.at:1416: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Reading a token
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
   | (- *) + (1 2) = 1
 ./calc.at:1416:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 30
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Reading a token
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 30
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 Starting parse
 Entering state 0
@@ -122624,6 +121244,16 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -122741,6 +121371,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: cat stderr
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -122751,11 +121382,148 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1414:  $PREPARSER ./calc  input
 ./calc.at:1416: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 117):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
   | (* *) + (*) + (*)
 ./calc.at:1416:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 117):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -122871,11 +121639,80 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+  | 1 + 2 * 3 + !- ++
+./calc.at:1414:  $PREPARSER ./calc  input
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 118):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
@@ -122988,6 +121825,72 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 118):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -122998,11 +121901,93 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1414: cat stderr
 ./calc.at:1416: cat stderr
 input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1414:  $PREPARSER ./calc  input
+input:
+stderr:
   | 1 + 2 * 3 + !+ ++
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 119):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1416:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -123127,6 +122112,73 @@
 Shifting token '!' (1.13: )
 Entering state 5
 Reading a token
+Next token is token '*' (1.14: )
+Shifting token '*' (1.14: )
+Entering state 15
+Reducing stack 0 by rule 19 (line 119):
+   $1 = token '!' (1.13: )
+   $2 = token '*' (1.14: )
+1.14: memory exhausted
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 22
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 31
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 101):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 30
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 21
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
 Next token is token '+' (1.14: )
 Shifting token '+' (1.14: )
 Entering state 14
@@ -123135,10 +122187,21 @@
    $2 = token '+' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | 1 + 2 * 3 + !- ++
 ./calc.at:1416:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1414: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -123205,6 +122268,10 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (#) + (#) = 2222
+./calc.at:1414:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -123271,6 +122338,104 @@
    $2 = token '-' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -123282,6 +122447,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 stdout:
 ./calc.at:1431: "$PERL" -ne '
   chomp;
@@ -123294,9 +122460,118 @@
         || /\t/
         )' calc.cc
 
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 21
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 30
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1416: cat stderr
 input:
 input:
+  | 1 + 2 * 3 + !* ++
+./calc.at:1416:  $PREPARSER ./calc  input
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -123311,9 +122586,11 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1431:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !* ++
-./calc.at:1416:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1414: cat stderr
+stderr:
+stdout:
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -123381,13 +122658,24 @@
 1.14: memory exhausted
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
 input:
-  | 1 2
+stderr:
+./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1432: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
+  | (1 + #) = 1111
+./calc.at:1414:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -123454,12 +122742,101 @@
 1.14: memory exhausted
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1431:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1432:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
 stderr:
-syntax error
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -123470,8 +122847,112 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1431:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+syntax error
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1416: cat stderr
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+stderr:
+stdout:
+./calc.at:1433: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
+stderr:
+input:
+  | 1 2
+./calc.at:1432:  $PREPARSER ./calc  input
+./calc.at:1414: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -123481,10 +122962,46 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error
+stderr:
+1.3: syntax error
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1414: cat stderr
 input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1433:  $PREPARSER ./calc  input
   | (#) + (#) = 2222
 ./calc.at:1416:  $PREPARSER ./calc  input
 stderr:
+1.3: syntax error
+input:
+./calc.at:1431: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+  | (# + 1) = 1111
+./calc.at:1414:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -123582,9 +123099,95 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1432: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1431: cat stderr
 stderr:
+./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -123682,10 +123285,76 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | 1//2
-./calc.at:1431:  $PREPARSER ./calc  input
 stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -123696,15 +123365,43 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./calc.at:1416: cat stderr
-syntax error
+  | 1 2
+  | 1//2
+./calc.at:1431:  $PREPARSER ./calc  input
+./calc.at:1432: cat stderr
+./calc.at:1433:  $PREPARSER ./calc  input
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | 1//2
+stderr:
+./calc.at:1432:  $PREPARSER ./calc  input
 input:
+1.3: syntax error
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
   | (1 + #) = 1111
 ./calc.at:1416:  $PREPARSER ./calc  input
+./calc.at:1414: cat stderr
+syntax error
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
+stderr:
+stderr:
+1.3: syntax error
 Starting parse
 Entering state 0
 Reading a token
@@ -123782,7 +123479,108 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+1.3: syntax error
+input:
+syntax error
+./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + # + 1) = 1111
+./calc.at:1414:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1433: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -123792,8 +123590,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -123871,7 +123667,18 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1431: cat stderr
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1431: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -123882,19 +123689,124 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1432: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 26
+Reducing stack 0 by rule 14 (line 114):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 28
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 89):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1433: cat stderr
+./calc.at:1416: cat stderr
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
+./calc.at:1431: cat stderr
   | error
-./calc.at:1431:  $PREPARSER ./calc  input
-./calc.at:1416: cat stderr
-stderr:
-syntax error
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1432:  $PREPARSER ./calc  input
+input:
 input:
 stderr:
+./calc.at:1414: cat stderr
+  | 1//2
+./calc.at:1433:  $PREPARSER ./calc  input
+1.1: syntax error
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (# + 1) = 1111
 ./calc.at:1416:  $PREPARSER ./calc  input
-syntax error
 stderr:
+input:
+stderr:
+stderr:
+1.3: syntax error
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1431:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -123964,8 +123876,144 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+1.1: syntax error
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1414:  $PREPARSER ./calc  input
+syntax error
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 102):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
+./calc.at:1432: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -124035,7 +124083,125 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 30
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 99):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 23
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 88):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 100):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 27
+Reducing stack 0 by rule 13 (line 113):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 32
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 102):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 25
+Reducing stack 0 by rule 4 (line 84):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 78):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 17
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124055,15 +124221,50 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1431: cat stderr
 ./calc.at:1416: cat stderr
+./calc.at:1431: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1432: cat stderr
+./calc.at:1414: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1433: cat stderr
+./calc.at:1431: cat stderr
 input:
+input:
+./calc.at:1414: cat stderr
+  | error
+./calc.at:1433:  $PREPARSER ./calc  input
   | (1 + # + 1) = 1111
 ./calc.at:1416:  $PREPARSER ./calc  input
 input:
 stderr:
+1.1: syntax error
+stderr:
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1432:  $PREPARSER ./calc  input
+520. calc.at:1414:  ok
+input:
+stderr:
   | 1 = 2 = 3
 ./calc.at:1431:  $PREPARSER ./calc  input
+1.1: syntax error
 Starting parse
 Entering state 0
 Reading a token
@@ -124150,10 +124351,24 @@
 stderr:
 syntax error
 ./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.7: syntax error
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-syntax error
+stderr:
+
+./calc.at:1433: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -124237,7 +124452,9 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+1.7: syntax error
+syntax error
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124257,11 +124474,41 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1433: cat stderr
+./calc.at:1431: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1416: cat stderr
 input:
+  | 1 = 2 = 3
+./calc.at:1433:  $PREPARSER ./calc  input
+./calc.at:1432: cat stderr
+stderr:
+1.7: syntax error
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+input:
+./calc.at:1431: cat stderr
+input:
+1.7: syntax error
+  | 
+  | +1
   | (1 + 1) / (1 - 1)
+  | 
+  | +1
 ./calc.at:1416:  $PREPARSER ./calc  input
-./calc.at:1431: cat stderr
+./calc.at:1431:  $PREPARSER ./calc  input
+./calc.at:1432:  $PREPARSER ./calc  input
+stderr:
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -124378,12 +124625,30 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
+528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose  ...
+syntax error
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1437: mv calc.y.tmp calc.y
+
+stderr:
 ./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1433: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-  | 
-  | +1
-./calc.at:1431:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+2.1: syntax error
+syntax error
 Starting parse
 Entering state 0
 Reading a token
@@ -124499,11 +124764,7 @@
 Entering state 17
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-syntax error
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1416: "$PERL" -pi -e 'use strict;
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124513,8 +124774,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-./calc.at:1416: cat stderr
 ./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -124525,16 +124784,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-521. calc.at:1416:  ok
-./calc.at:1431: cat stderr
-./calc.at:1431:  $PREPARSER ./calc  /dev/null
-
-stderr:
-syntax error
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+./calc.at:1433: cat stderr
+./calc.at:1416: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124546,26 +124797,31 @@
 ' expout || exit 77
 ./calc.at:1431: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1431:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1416: cat stderr
+./calc.at:1433:  $PREPARSER ./calc  input
+./calc.at:1432: cat stderr
+./calc.at:1432:  $PREPARSER ./calc  /dev/null
 stderr:
-529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose  ...
-syntax error
-syntax error
-syntax error
+521. calc.at:1416:  ok
+./calc.at:1431:  $PREPARSER ./calc  /dev/null
+2.1: syntax error
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.1: syntax error
 syntax error
-error: 4444 != 1
 ./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1438: mv calc.y.tmp calc.y
-
-./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+stderr:
+2.1: syntax error
+1.1: syntax error
 syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+./calc.at:1437: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124575,17 +124831,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1431: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1431:  $PREPARSER ./calc  input
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-error: 2222 != 1
+./calc.at:1433: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+
 ./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -124596,21 +124852,45 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1438: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1432: cat stderr
+./calc.at:1433: cat stderr
+./calc.at:1433:  $PREPARSER ./calc  /dev/null
 ./calc.at:1431: cat stderr
+stderr:
 input:
-  | (- *) + (1 2) = 1
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1432:  $PREPARSER ./calc  input
+1.1: syntax error
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
 ./calc.at:1431:  $PREPARSER ./calc  input
+1.1: syntax error
+stderr:
 stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose  ...
+./calc.at:1438: mv calc.y.tmp calc.y
+
 syntax error
 syntax error
-error: 2222 != 1
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 syntax error
 syntax error
-error: 2222 != 1
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+error: 4444 != 1
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124620,19 +124900,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1431: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1431:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 syntax error
 syntax error
 syntax error
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-syntax error
 syntax error
+error: 4444 != 1
+./calc.at:1432: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1433: cat stderr
+./calc.at:1432: cat stderr
 ./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -124643,21 +124929,36 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1431: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1431:  $PREPARSER ./calc  input
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1433:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1431: cat stderr
+  | (!!) + (1 2) = 1
+./calc.at:1432:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+./calc.at:1438: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | 1 + 2 * 3 + !- ++
+  | (!!) + (1 2) = 1
 ./calc.at:1431:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124667,18 +124968,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1431: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1431:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
+1.11: syntax error
+1.1-16: error: 2222 != 1
+syntax error
+error: 2222 != 1
 ./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+syntax error
+error: 2222 != 1
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124688,15 +124987,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1431: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1431:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1433: cat stderr
+./calc.at:1432: cat stderr
 stderr:
-syntax error: invalid character: '#'
+stdout:
+input:
 ./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -124707,18 +125002,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1431: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1431:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-stderr:
-stdout:
-./calc.at:1432: "$PERL" -ne '
+  | (!!) + (1 2) = 1
+./calc.at:1433:  $PREPARSER ./calc  input
+./calc.at:1434: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -124729,17 +125015,15 @@
         || /\t/
         )' calc.cc
 
+stderr:
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-./calc.at:1431: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1432:  $PREPARSER ./calc  input
+./calc.at:1431: cat stderr
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -124753,39 +125037,32 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1432:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1434:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1431: cat stderr
-./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | (1 + # + 1) = 1111
 input:
-  | 1 2
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+  | (- *) + (1 2) = 1
 ./calc.at:1431:  $PREPARSER ./calc  input
-./calc.at:1432:  $PREPARSER ./calc  input
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-1.3: syntax error
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error: invalid character: '#'
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error
+1.1-16: error: 2222 != 1
 stderr:
-1.3: syntax error
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1432: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1431: "$PERL" -pi -e 'use strict;
+syntax error
+syntax error
+error: 2222 != 1
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124795,22 +125072,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1432: cat stderr
-input:
-./calc.at:1431: cat stderr
-  | 1//2
-./calc.at:1432:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | (1 + 1) / (1 - 1)
-./calc.at:1431:  $PREPARSER ./calc  input
-1.3: syntax error
 stderr:
-error: null divisor
-./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+error: 2222 != 1
 ./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -124821,12 +125086,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | 1 2
+./calc.at:1434:  $PREPARSER ./calc  input
 stderr:
-error: null divisor
+./calc.at:1433: cat stderr
 ./calc.at:1432: cat stderr
-input:
-  | error
-./calc.at:1432:  $PREPARSER ./calc  input
 ./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -124837,14 +125102,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.3: syntax error
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
 stderr:
-1.1: syntax error
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (- *) + (1 2) = 1
+./calc.at:1433:  $PREPARSER ./calc  input
+stderr:
+1.3: syntax error
+  | (* *) + (*) + (*)
+./calc.at:1432:  $PREPARSER ./calc  input
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.1: syntax error
 ./calc.at:1431: cat stderr
-523. calc.at:1431:  ok
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124854,17 +125133,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1432: cat stderr
-
-input:
-  | 1 = 2 = 3
-./calc.at:1432:  $PREPARSER ./calc  input
-stderr:
-1.7: syntax error
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
 ./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.7: syntax error
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124874,20 +125148,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1432: cat stderr
-input:
-530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose  ...
-  | 
-  | +1
-./calc.at:1432:  $PREPARSER ./calc  input
-./calc.at:1440: mv calc.y.tmp calc.y
-
-./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+  | (* *) + (*) + (*)
+./calc.at:1431:  $PREPARSER ./calc  input
 stderr:
-2.1: syntax error
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-2.1: syntax error
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1434: cat stderr
+syntax error
+syntax error
+syntax error
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1433: cat stderr
+stderr:
+syntax error
+syntax error
+syntax error
+input:
+  | 1//2
+./calc.at:1434:  $PREPARSER ./calc  input
+stderr:
+input:
 ./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -124898,17 +125180,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1432: cat stderr
-./calc.at:1432:  $PREPARSER ./calc  /dev/null
+1.3: syntax error
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1433:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1440: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-1.1: syntax error
 stderr:
+stderr:
+1.3: syntax error
 stdout:
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -124918,7 +125207,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1434: "$PERL" -ne '
+./calc.at:1426: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -124927,10 +125216,30 @@
         || /\s$/
         # No tabs.
         || /\t/
-        )' calc.cc
+        )' calc.cc calc.hh
 
-input:
 ./calc.at:1432: cat stderr
+input:
+./calc.at:1433: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1434: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -124944,61 +125253,43 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1434:  $PREPARSER ./calc  input
-stderr:
-stderr:
+./calc.at:1426:  $PREPARSER ./calc  input
 input:
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1432:  $PREPARSER ./calc  input
-./calc.at:1433: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
+./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1431: cat stderr
+./calc.at:1434: cat stderr
 stderr:
 stderr:
-./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
+input:
+./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1434:  $PREPARSER ./calc  input
+./calc.at:1433: cat stderr
+stderr:
+stderr:
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
+1.1: syntax error
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+  | 1 + 2 * 3 + !+ ++
+1.1: syntax error
+./calc.at:1431:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1433:  $PREPARSER ./calc  input
+input:
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
 stderr:
-input:
   | 1 2
-./calc.at:1434:  $PREPARSER ./calc  input
+./calc.at:1426:  $PREPARSER ./calc  input
 ./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125008,15 +125299,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1432:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
+syntax error
+./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.3: syntax error
-./calc.at:1432: cat stderr
-./calc.at:1434: "$PERL" -pi -e 'use strict;
+stderr:
+stderr:
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+stderr:
+./calc.at:1434: cat stderr
+input:
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1433:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !- ++
+./calc.at:1431:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125026,26 +125335,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 2
-./calc.at:1433:  $PREPARSER ./calc  input
-input:
-stderr:
-1.3: syntax error
-  | (!!) + (1 2) = 1
-./calc.at:1432:  $PREPARSER ./calc  input
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1434: cat stderr
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.3: syntax error
 stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1433: "$PERL" -pi -e 'use strict;
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1434:  $PREPARSER ./calc  input
+./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125055,11 +125349,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1//2
-./calc.at:1434:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+stderr:
+1.7: syntax error
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125069,16 +125363,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.3: syntax error
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1433: cat stderr
-1.3: syntax error
-input:
 ./calc.at:1432: cat stderr
-  | 1//2
-./calc.at:1433:  $PREPARSER ./calc  input
-./calc.at:1434: "$PERL" -pi -e 'use strict;
+1.7: syntax error
+./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125088,25 +125376,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.3: syntax error
 input:
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error
-  | (- *) + (1 2) = 1
+./calc.at:1426: cat stderr
+  | (#) + (#) = 2222
 ./calc.at:1432:  $PREPARSER ./calc  input
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1434: cat stderr
-./calc.at:1433: "$PERL" -pi -e 'use strict;
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125116,11 +125390,29 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+./calc.at:1431: cat stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
-  | error
 ./calc.at:1433: cat stderr
-./calc.at:1434:  $PREPARSER ./calc  input
+./calc.at:1434: cat stderr
+  | (#) + (#) = 2222
+./calc.at:1431:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+input:
+  | 1//2
+./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+syntax error
+./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125131,56 +125423,36 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+stderr:
 input:
-1.1: syntax error
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | error
+input:
+  | (#) + (#) = 2222
+  | 
+  | +1
 ./calc.at:1433:  $PREPARSER ./calc  input
+./calc.at:1434:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 stderr:
-stderr:
-1.1: syntax error
-1.1: syntax error
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
 ./calc.at:1432: cat stderr
-./calc.at:1434: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+2.1: syntax error
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (* *) + (*) + (*)
+  | (1 + #) = 1111
 ./calc.at:1432:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error
-./calc.at:1434: cat stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error
 stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
+1.6: syntax error: invalid character: '#'
 ./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-input:
-  | 1 = 2 = 3
-./calc.at:1434:  $PREPARSER ./calc  input
-./calc.at:1433: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125190,16 +125462,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.7: syntax error
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1433: cat stderr
-stderr:
-input:
-./calc.at:1432: cat stderr
-1.7: syntax error
-  | 1 = 2 = 3
-./calc.at:1433:  $PREPARSER ./calc  input
 ./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125211,19 +125473,10 @@
   }eg
 ' expout || exit 77
 stderr:
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1432:  $PREPARSER ./calc  input
-1.7: syntax error
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.7: syntax error
-stderr:
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1434: cat stderr
-input:
-stderr:
-./calc.at:1433: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+1.6: syntax error: invalid character: '#'
+./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125233,27 +125486,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-  | 
-  | +1
-./calc.at:1434:  $PREPARSER ./calc  input
-stderr:
-2.1: syntax error
-input:
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !- ++
-./calc.at:1432:  $PREPARSER ./calc  input
-./calc.at:1433: cat stderr
-stderr:
-stderr:
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-2.1: syntax error
-stderr:
-  | 
-  | +1
-./calc.at:1433:  $PREPARSER ./calc  input
-./calc.at:1434: "$PERL" -pi -e 'use strict;
+./calc.at:1431: cat stderr
+./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125263,11 +125497,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-2.1: syntax error
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-2.1: syntax error
 ./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125278,37 +125507,36 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1433: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1426: cat stderr
+input:
+  | (1 + #) = 1111
+./calc.at:1431:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1434: cat stderr
+input:
 ./calc.at:1434:  $PREPARSER ./calc  /dev/null
-./calc.at:1433: cat stderr
-./calc.at:1432: cat stderr
+syntax error: invalid character: '#'
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1432: cat stderr
 1.1: syntax error
 ./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1433:  $PREPARSER ./calc  /dev/null
-input:
 stderr:
-  | (#) + (#) = 2222
-./calc.at:1432:  $PREPARSER ./calc  input
+syntax error
+./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1433: cat stderr
 stderr:
-1.1: syntax error
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.1: syntax error
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+input:
+syntax error: invalid character: '#'
+  | (# + 1) = 1111
+./calc.at:1432:  $PREPARSER ./calc  input
+syntax error
 stderr:
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125319,9 +125547,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.1: syntax error
-stderr:
-./calc.at:1433: "$PERL" -pi -e 'use strict;
+./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125331,14 +125557,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1434: cat stderr
 input:
-./calc.at:1433: cat stderr
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1434:  $PREPARSER ./calc  input
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+  | (1 + #) = 1111
+1.2: syntax error: invalid character: '#'
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1433:  $PREPARSER ./calc  input
+./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125349,30 +125573,13 @@
   }eg
 ' expout || exit 77
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-stderr:
-./calc.at:1433:  $PREPARSER ./calc  input
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1432: cat stderr
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
+1.2: syntax error: invalid character: '#'
+./calc.at:1426: cat stderr
+1.6: syntax error: invalid character: '#'
 ./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1434: "$PERL" -pi -e 'use strict;
+./calc.at:1434: cat stderr
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125382,34 +125589,39 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1431: cat stderr
+stderr:
 input:
-  | (1 + #) = 1111
+  | 1 = 2 = 3
+./calc.at:1426:  $PREPARSER ./calc  input
+./calc.at:1432: cat stderr
+1.6: syntax error: invalid character: '#'
+stderr:
+input:
+input:
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1434:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
 ./calc.at:1432:  $PREPARSER ./calc  input
+syntax error
 stderr:
+./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (# + 1) = 1111
+./calc.at:1431:  $PREPARSER ./calc  input
 stderr:
 1.6: syntax error: invalid character: '#'
 ./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1434: cat stderr
 1.2: syntax error
 1.18: syntax error
 1.23: syntax error
 1.41: syntax error
 1.1-46: error: 4444 != 1
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 stderr:
 1.6: syntax error: invalid character: '#'
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1432: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1434:  $PREPARSER ./calc  input
 stderr:
 ./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -125421,15 +125633,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1433: cat stderr
+syntax error: invalid character: '#'
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
 stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1432: cat stderr
-input:
+syntax error: invalid character: '#'
 ./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125440,34 +125653,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (!!) + (1 2) = 1
-./calc.at:1433:  $PREPARSER ./calc  input
-stderr:
-input:
-  | (# + 1) = 1111
-./calc.at:1432:  $PREPARSER ./calc  input
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1434: cat stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.11: syntax error
-1.1-16: error: 2222 != 1
-input:
-stderr:
-1.2: syntax error: invalid character: '#'
-  | (- *) + (1 2) = 1
-./calc.at:1434:  $PREPARSER ./calc  input
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+./calc.at:1433: cat stderr
+./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125477,10 +125664,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1434: "$PERL" -pi -e 'use strict;
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125490,7 +125674,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1433: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1434: cat stderr
+./calc.at:1431: cat stderr
+./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125500,51 +125687,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1432: cat stderr
+  | (# + 1) = 1111
+./calc.at:1433:  $PREPARSER ./calc  input
 input:
-./calc.at:1434: cat stderr
+./calc.at:1432: cat stderr
   | (1 + # + 1) = 1111
-./calc.at:1432:  $PREPARSER ./calc  input
-./calc.at:1433: cat stderr
+./calc.at:1431:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (* *) + (*) + (*)
+stderr:
+./calc.at:1426: cat stderr
 input:
+  | (!!) + (1 2) = 1
+1.2: syntax error: invalid character: '#'
 ./calc.at:1434:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1432:  $PREPARSER ./calc  input
 stderr:
+1.11: syntax error
+1.1-16: error: 2222 != 1
+input:
 stderr:
-  | (- *) + (1 2) = 1
-./calc.at:1433:  $PREPARSER ./calc  input
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-1.6: syntax error: invalid character: '#'
 ./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-stderr:
-./calc.at:1432: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
+1.11-17: error: null divisor
+./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125555,6 +125726,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+stderr:
+  | 
+  | +1
+./calc.at:1426:  $PREPARSER ./calc  input
+1.11: syntax error
+1.1-16: error: 2222 != 1
+stderr:
+syntax error: invalid character: '#'
+1.11-17: error: null divisor
+stderr:
+syntax error
+./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125565,37 +125750,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1432: cat stderr
-./calc.at:1434: cat stderr
 ./calc.at:1433: cat stderr
 input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1432:  $PREPARSER ./calc  input
-input:
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1434:  $PREPARSER ./calc  input
-stderr:
-  | (* *) + (*) + (*)
-stderr:
-./calc.at:1433:  $PREPARSER ./calc  input
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.11-17: error: null divisor
-./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-stderr:
-./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.11-17: error: null divisor
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1432: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125605,7 +125763,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1433: "$PERL" -pi -e 'use strict;
+  | (1 + # + 1) = 1111
+./calc.at:1433:  $PREPARSER ./calc  input
+./calc.at:1432: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125615,17 +125775,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1434:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1434: cat stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1432: cat stderr
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-524. calc.at:1432:  ok
-./calc.at:1433: cat stderr
-input:
-./calc.at:1434: "$PERL" -pi -e 'use strict;
+./calc.at:1431: cat stderr
+./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125635,30 +125791,29 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1433:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1434: cat stderr
+524. calc.at:1432:  ok
 stderr:
-./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-input:
-  | (#) + (#) = 2222
+  | (- *) + (1 2) = 1
 ./calc.at:1434:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !- ++
-./calc.at:1433:  $PREPARSER ./calc  input
-stderr:
+1.6: syntax error: invalid character: '#'
+input:
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+  | (1 + 1) / (1 - 1)
+./calc.at:1431:  $PREPARSER ./calc  input
+
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
 ./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+./calc.at:1426: cat stderr
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+error: null divisor
+./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125669,51 +125824,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose  ...
-./calc.at:1441: mv calc.y.tmp calc.y
-
-./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1434: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1433: cat stderr
-./calc.at:1434: cat stderr
-input:
-input:
-  | (#) + (#) = 2222
-./calc.at:1433:  $PREPARSER ./calc  input
-  | (1 + #) = 1111
-./calc.at:1434:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+./calc.at:1426:  $PREPARSER ./calc  /dev/null
 stderr:
-./calc.at:1441: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-1.6: syntax error: invalid character: '#'
-./calc.at:1433: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+error: null divisor
 ./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125724,20 +125838,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error
+./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1433: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1433:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
 ./calc.at:1434: cat stderr
-stderr:
-1.6: syntax error: invalid character: '#'
 input:
-  | (# + 1) = 1111
-./calc.at:1434:  $PREPARSER ./calc  input
-./calc.at:1433: "$PERL" -pi -e 'use strict;
+  | (1 + 1) / (1 - 1)
+./calc.at:1433:  $PREPARSER ./calc  input
+./calc.at:1431: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125748,50 +125858,13 @@
   }eg
 ' expout || exit 77
 stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1433: cat stderr
-stderr:
-1.2: syntax error: invalid character: '#'
 input:
-./calc.at:1434: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | (# + 1) = 1111
-./calc.at:1433:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1434: cat stderr
-1.2: syntax error: invalid character: '#'
+1.11-17: error: null divisor
 ./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-  | (1 + # + 1) = 1111
+  | (* *) + (*) + (*)
 ./calc.at:1434:  $PREPARSER ./calc  input
-1.2: syntax error: invalid character: '#'
-stderr:
-./calc.at:1433: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.6: syntax error: invalid character: '#'
-./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1433: cat stderr
-./calc.at:1434: "$PERL" -pi -e 'use strict;
+530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose  ...
+./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125801,33 +125874,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1433:  $PREPARSER ./calc  input
-./calc.at:1434: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
+./calc.at:1440: mv calc.y.tmp calc.y
+
+./calc.at:1431: cat stderr
 stderr:
-./calc.at:1434:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.11-17: error: null divisor
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
 ./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-stderr:
-./calc.at:1433: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 1.11-17: error: null divisor
+./calc.at:1426: cat stderr
+stderr:
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+523. calc.at:1431:  ok
 ./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125838,21 +125901,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1433: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1434: cat stderr
-./calc.at:1433:  $PREPARSER ./calc  input
-526. calc.at:1434:  ok
-stderr:
-
-1.11-17: error: null divisor
-./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11-17: error: null divisor
-532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1443: mv calc.y.tmp calc.y
-
 ./calc.at:1433: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125863,56 +125911,39 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
+./calc.at:1434: cat stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1426:  $PREPARSER ./calc  input
 ./calc.at:1433: cat stderr
-525. calc.at:1433:  ok
-
-./calc.at:1443: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1445: mv calc.y.tmp calc.y
-
-./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1445: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
-stdout:
-./calc.at:1426: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
 
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1426:  $PREPARSER ./calc  input
-stderr:
+525. calc.at:1433: syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+ ok
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1434:  $PREPARSER ./calc  input
+./calc.at:1440: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
-./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 2
-./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
 syntax error
-./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 syntax error
+syntax error
+error: 4444 != 1
+stderr:
+./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+
+input:
+  | 1 + 2 * 3 + !- ++
+531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose  ...
+./calc.at:1434:  $PREPARSER ./calc  input
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125923,16 +125954,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1426: cat stderr
-input:
-  | 1//2
-./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1441: mv calc.y.tmp calc.y
+
+./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 stderr:
-syntax error
-./calc.at:1426: "$PERL" -pi -e 'use strict;
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125944,13 +125972,28 @@
 ' expout || exit 77
 ./calc.at:1426: cat stderr
 input:
-  | error
+  | (!!) + (1 2) = 1
 ./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
+532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1443: mv calc.y.tmp calc.y
+
 syntax error
+error: 2222 != 1
+./calc.at:1434: cat stderr
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
+error: 2222 != 1
+./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
+  | (#) + (#) = 2222
+./calc.at:1434:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -125961,16 +126004,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1426: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1426:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1426: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1441: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -125981,15 +126018,28 @@
   }eg
 ' expout || exit 77
 ./calc.at:1426: cat stderr
+./calc.at:1434: cat stderr
 input:
-  | 
-  | +1
+  | (- *) + (1 2) = 1
 ./calc.at:1426:  $PREPARSER ./calc  input
+input:
 stderr:
+  | (1 + #) = 1111
+./calc.at:1434:  $PREPARSER ./calc  input
 syntax error
+syntax error
+error: 2222 != 1
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1443: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
 syntax error
+syntax error
+error: 2222 != 1
+stderr:
+1.6: syntax error: invalid character: '#'
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126000,14 +126050,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1426: cat stderr
-./calc.at:1426:  $PREPARSER ./calc  /dev/null
-stderr:
-syntax error
-./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1426: "$PERL" -pi -e 'use strict;
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -126017,24 +126060,29 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1434: cat stderr
 ./calc.at:1426: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+input:
+  | (# + 1) = 1111
+./calc.at:1434:  $PREPARSER ./calc  input
+  | (* *) + (*) + (*)
 ./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 syntax error
 syntax error
 syntax error
-syntax error
-error: 4444 != 1
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
 syntax error
 syntax error
 syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1426: "$PERL" -pi -e 'use strict;
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -126044,17 +126092,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1426: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1426:  $PREPARSER ./calc  input
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-error: 2222 != 1
+./calc.at:1434: cat stderr
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126067,18 +126105,20 @@
 ' expout || exit 77
 ./calc.at:1426: cat stderr
 input:
-  | (- *) + (1 2) = 1
+  | (1 + # + 1) = 1111
+./calc.at:1434:  $PREPARSER ./calc  input
+stderr:
+input:
+1.6: syntax error: invalid character: '#'
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
-syntax error
-syntax error
-error: 2222 != 1
+stderr:
+1.6: syntax error: invalid character: '#'
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1426: "$PERL" -pi -e 'use strict;
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -126088,19 +126128,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1426: cat stderr
+./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-  | (* *) + (*) + (*)
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
-syntax error
-syntax error
-syntax error
+./calc.at:1434: cat stderr
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-syntax error
-syntax error
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1434:  $PREPARSER ./calc  input
+stderr:
+1.11-17: error: null divisor
+./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126111,21 +126153,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1426: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1426:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1426:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1426: "$PERL" -pi -e 'use strict;
+1.11-17: error: null divisor
+./calc.at:1434: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -126139,27 +126168,16 @@
 input:
   | (#) + (#) = 2222
 ./calc.at:1426:  $PREPARSER ./calc  input
+./calc.at:1434: cat stderr
 stderr:
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+526. calc.at:1434:  ok
 stderr:
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
-stdout:
-./calc.at:1437: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
 
-input:
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126170,41 +126188,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1437:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1426: cat stderr
-stderr:
-./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
   | (1 + #) = 1111
 ./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
 syntax error: invalid character: '#'
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
 stderr:
-  | 1 2
-./calc.at:1437:  $PREPARSER ./calc  input
+533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1445: mv calc.y.tmp calc.y
+
 syntax error: invalid character: '#'
-stderr:
-1.3: syntax error, unexpected number
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error, unexpected number
+./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126215,44 +126211,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1426: cat stderr
-./calc.at:1437: cat stderr
-input:
 input:
   | (# + 1) = 1111
 ./calc.at:1426:  $PREPARSER ./calc  input
-  | 1//2
-./calc.at:1437:  $PREPARSER ./calc  input
 stderr:
 syntax error: invalid character: '#'
-stderr:
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1445: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
 syntax error: invalid character: '#'
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126263,35 +126231,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1437: cat stderr
 ./calc.at:1426: cat stderr
 input:
-input:
-  | error
-./calc.at:1437:  $PREPARSER ./calc  input
   | (1 + # + 1) = 1111
 ./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
-stderr:
 syntax error: invalid character: '#'
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.1: syntax error, unexpected invalid token
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
 syntax error: invalid character: '#'
-1.1: syntax error, unexpected invalid token
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1437: cat stderr
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126302,41 +126250,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 = 2 = 3
-./calc.at:1437:  $PREPARSER ./calc  input
 ./calc.at:1426: cat stderr
-stderr:
 input:
-1.7: syntax error, unexpected '='
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (1 + 1) / (1 - 1)
 ./calc.at:1426:  $PREPARSER ./calc  input
 stderr:
-1.7: syntax error, unexpected '='
-stderr:
 error: null divisor
 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
 error: null divisor
-./calc.at:1437: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1437:  $PREPARSER ./calc  input
-stderr:
-2.1: syntax error, unexpected '+'
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1426: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126347,191 +126269,54 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-2.1: syntax error, unexpected '+'
 ./calc.at:1426: cat stderr
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 522. calc.at:1426:  ok
-./calc.at:1437: cat stderr
-./calc.at:1437:  $PREPARSER ./calc  /dev/null
-stderr:
 
-1.1: syntax error, unexpected end of input
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.1: syntax error, unexpected end of input
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1437: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1437:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
 ./calc.at:1446: mv calc.y.tmp calc.y
 
-./calc.at:1437: cat stderr
-input:
 ./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-  | (!!) + (1 2) = 1
-./calc.at:1437:  $PREPARSER ./calc  input
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1437: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1437:  $PREPARSER ./calc  input
-stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1446: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-./calc.at:1437: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1437:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1437: cat stderr
+stdout:
+./calc.at:1437: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 input:
-  | 1 + 2 * 3 + !+ ++
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
 ./calc.at:1437:  $PREPARSER ./calc  input
 stderr:
 ./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 ./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1437:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1437: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1437:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1437: cat stderr
-input:
-  | (1 + #) = 1111
+  | 1 2
 ./calc.at:1437:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
+1.3: syntax error, unexpected number
 ./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.6: syntax error: invalid character: '#'
+1.3: syntax error, unexpected number
 ./calc.at:1437: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126544,13 +126329,13 @@
 ' expout || exit 77
 ./calc.at:1437: cat stderr
 input:
-  | (# + 1) = 1111
+  | 1//2
 ./calc.at:1437:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1437: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -126563,52 +126348,15 @@
 ' expout || exit 77
 ./calc.at:1437: cat stderr
 input:
-  | (1 + # + 1) = 1111
+  | error
 ./calc.at:1437:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1437: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1437:  $PREPARSER ./calc  input
 stderr:
-1.11-17: error: null divisor
+1.1: syntax error, unexpected invalid token
 ./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11-17: error: null divisor
-./calc.at:1437: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1437: cat stderr
-528. calc.at:1437:  ok
-
-535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none  ...
-./calc.at:1448: mv calc.y.tmp calc.y
-
-./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1448: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-stderr:
 stdout:
+stderr:
+1.1: syntax error, unexpected invalid token
 ./calc.at:1438: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -126620,6 +126368,17 @@
         || /\t/
         )' calc.cc
 
+./calc.at:1437: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1437: cat stderr
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -126636,19 +126395,6 @@
   | (2^2)^3 = 64
 ./calc.at:1438:  $PREPARSER ./calc  input
 stderr:
-stdout:
-./calc.at:1435: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -127669,6 +127415,8 @@
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
 stderr:
+  | 1 = 2 = 3
+./calc.at:1437:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -128687,25 +128435,12 @@
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1435:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.7: syntax error, unexpected '='
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+1.7: syntax error, unexpected '='
   | 1 2
 ./calc.at:1438:  $PREPARSER ./calc  input
 stderr:
@@ -128730,13 +128465,17 @@
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | 1 2
-./calc.at:1435:  $PREPARSER ./calc  input
+./calc.at:1437: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-1.3: syntax error, unexpected number
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -128757,8 +128496,11 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
-stderr:
-1.3: syntax error, unexpected number
+./calc.at:1437: cat stderr
+input:
+  | 
+  | +1
+./calc.at:1437:  $PREPARSER ./calc  input
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -128769,28 +128511,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1435: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1438: cat stderr
-./calc.at:1435: cat stderr
-input:
 input:
   | 1//2
 ./calc.at:1438:  $PREPARSER ./calc  input
-  | 1//2
-./calc.at:1435:  $PREPARSER ./calc  input
 stderr:
 stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -128819,8 +128545,8 @@
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+2.1: syntax error, unexpected '+'
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -128849,7 +128575,9 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
-./calc.at:1435: "$PERL" -pi -e 'use strict;
+stderr:
+2.1: syntax error, unexpected '+'
+./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -128859,8 +128587,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1435: cat stderr
-./calc.at:1438: "$PERL" -pi -e 'use strict;
+./calc.at:1438: cat stderr
+./calc.at:1437: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -128871,17 +128599,11 @@
   }eg
 ' expout || exit 77
 input:
-./calc.at:1438: cat stderr
-  | error
-./calc.at:1435:  $PREPARSER ./calc  input
-stderr:
-1.1: syntax error, unexpected invalid token
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
+./calc.at:1437: cat stderr
   | error
-stderr:
 ./calc.at:1438:  $PREPARSER ./calc  input
-1.1: syntax error, unexpected invalid token
+./calc.at:1437:  $PREPARSER ./calc  /dev/null
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -128892,16 +128614,9 @@
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1435: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+1.1: syntax error, unexpected end of input
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -128911,7 +128626,34 @@
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
-./calc.at:1435: cat stderr
+stderr:
+1.1: syntax error, unexpected end of input
+stdout:
+./calc.at:1435: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -128922,19 +128664,41 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-./calc.at:1438: cat stderr
-  | 1 = 2 = 3
 ./calc.at:1435:  $PREPARSER ./calc  input
 stderr:
-1.7: syntax error, unexpected '='
 ./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1437: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1437: cat stderr
 input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1437:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1438: cat stderr
 stderr:
+./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
   | 1 = 2 = 3
 ./calc.at:1438:  $PREPARSER ./calc  input
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 2
+./calc.at:1435:  $PREPARSER ./calc  input
+stderr:
 stderr:
-1.7: syntax error, unexpected '='
 Starting parse
 Entering state 0
 Stack now 0
@@ -128976,16 +128740,15 @@
 Stack now 0
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1435: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+1.3: syntax error, unexpected number
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -129025,8 +128788,18 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-./calc.at:1435: cat stderr
-input:
+./calc.at:1437: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1437: cat stderr
+1.3: syntax error, unexpected number
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -129037,20 +128810,34 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 
-  | +1
-./calc.at:1435:  $PREPARSER ./calc  input
+input:
 ./calc.at:1438: cat stderr
+  | (!!) + (1 2) = 1
+./calc.at:1437:  $PREPARSER ./calc  input
 stderr:
-2.1: syntax error, unexpected '+'
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-stderr:
+./calc.at:1435: cat stderr
   | 
   | +1
 ./calc.at:1438:  $PREPARSER ./calc  input
-2.1: syntax error, unexpected '+'
 stderr:
+stderr:
+input:
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
 Starting parse
 Entering state 0
 Stack now 0
@@ -129077,7 +128864,9 @@
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1435: "$PERL" -pi -e 'use strict;
+  | 1//2
+./calc.at:1435:  $PREPARSER ./calc  input
+./calc.at:1437: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -129087,7 +128876,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1435: cat stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -129114,8 +128903,11 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
-./calc.at:1435:  $PREPARSER ./calc  /dev/null
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1437: cat stderr
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -129126,13 +128918,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.1: syntax error, unexpected end of input
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./calc.at:1438: cat stderr
-1.1: syntax error, unexpected end of input
-./calc.at:1438:  $PREPARSER ./calc  /dev/null
-stderr:
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1437:  $PREPARSER ./calc  input
 ./calc.at:1435: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -129143,6 +128932,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+./calc.at:1438:  $PREPARSER ./calc  /dev/null
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1435: cat stderr
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -129152,8 +128950,13 @@
 Cleanup: discarding lookahead token end of input (1.1: )
 Stack now 0
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+input:
 stderr:
-./calc.at:1435: cat stderr
+  | error
+./calc.at:1435:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -129162,8 +128965,7 @@
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
 Stack now 0
-input:
-./calc.at:1438: "$PERL" -pi -e 'use strict;
+./calc.at:1437: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -129173,27 +128975,54 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1435:  $PREPARSER ./calc  input
-./calc.at:1438: cat stderr
 stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
+./calc.at:1437: cat stderr
+1.1: syntax error, unexpected invalid token
 ./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1438: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 input:
+1.1: syntax error, unexpected invalid token
+  | (* *) + (*) + (*)
+./calc.at:1437:  $PREPARSER ./calc  input
+./calc.at:1438: cat stderr
 stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1435: cat stderr
+input:
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1438:  $PREPARSER ./calc  input
+input:
+  | 1 = 2 = 3
+./calc.at:1435:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1435: "$PERL" -pi -e 'use strict;
+./calc.at:1437: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -129522,7 +129351,10 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1435: cat stderr
+./calc.at:1437: cat stderr
+1.7: syntax error, unexpected '='
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -129840,7 +129672,11 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+1.7: syntax error, unexpected '='
 input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1437:  $PREPARSER ./calc  input
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -129851,20 +129687,38 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (!!) + (1 2) = 1
-./calc.at:1435:  $PREPARSER ./calc  input
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1438: cat stderr
+./calc.at:1435: cat stderr
 stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 
+  | +1
+./calc.at:1435:  $PREPARSER ./calc  input
 input:
 stderr:
+./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+2.1: syntax error, unexpected '+'
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (!!) + (1 2) = 1
 ./calc.at:1438:  $PREPARSER ./calc  input
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
 stderr:
+input:
+stderr:
+2.1: syntax error, unexpected '+'
+  | 1 + 2 * 3 + !- ++
+./calc.at:1437:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -130006,16 +129860,8 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1435: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -130157,8 +130003,17 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1435: cat stderr
-input:
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -130169,23 +130024,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (- *) + (1 2) = 1
-./calc.at:1435:  $PREPARSER ./calc  input
 ./calc.at:1438: cat stderr
-stderr:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-  | (- *) + (1 2) = 1
-./calc.at:1438:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1435: "$PERL" -pi -e 'use strict;
+./calc.at:1437: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -130195,6 +130035,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+./calc.at:1437: cat stderr
+  | (- *) + (1 2) = 1
+./calc.at:1438:  $PREPARSER ./calc  input
+./calc.at:1435: cat stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -130344,6 +130190,9 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1435:  $PREPARSER ./calc  /dev/null
+stderr:
+input:
 stderr:
 Starting parse
 Entering state 0
@@ -130493,8 +130342,15 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1435: cat stderr
-input:
+1.1: syntax error, unexpected end of input
+  | (#) + (#) = 2222
+./calc.at:1437:  $PREPARSER ./calc  input
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -130505,22 +130361,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (* *) + (*) + (*)
-./calc.at:1435:  $PREPARSER ./calc  input
+1.1: syntax error, unexpected end of input
 stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1438: cat stderr
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-input:
-  | (* *) + (*) + (*)
-./calc.at:1438:  $PREPARSER ./calc  input
-stderr:
 ./calc.at:1435: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -130531,6 +130376,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1437: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1435: cat stderr
+  | (* *) + (*) + (*)
+./calc.at:1438:  $PREPARSER ./calc  input
+./calc.at:1437: cat stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -130683,8 +130544,14 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
-./calc.at:1435: cat stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
+./calc.at:1435:  $PREPARSER ./calc  input
+  | (1 + #) = 1111
+./calc.at:1437:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -130836,7 +130703,9 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
+stdout:
+stderr:
+stderr:
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -130847,192 +130716,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1435:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1438: cat stderr
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1438:  $PREPARSER ./calc  input
-stderr:
-input:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 20 5 14
-Reducing stack by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 20
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !- ++
-./calc.at:1435:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 20 5 14
-Reducing stack by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 20
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1440: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
 ./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1438:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+./calc.at:1438: cat stderr
 stderr:
-stdout:
-./calc.at:1435: "$PERL" -pi -e 'use strict;
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1437: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -131042,101 +130754,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1440:  $PREPARSER ./calc  input
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1438:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1440: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 20 5 13
-Reducing stack by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Stack now 0 8 20
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1435: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -131207,52 +130844,17 @@
 Entering state 5
 Stack now 0 8 20 5
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 20 5 13
-Reducing stack by rule 18 (line 109):
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 20 5 14
+Reducing stack by rule 17 (line 108):
    $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
+   $2 = token '+' (1.14: )
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-input:
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1440:  $PREPARSER ./calc  input
-./calc.at:1438: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | (#) + (#) = 2222
-./calc.at:1435:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1438: cat stderr
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-input:
+./calc.at:1437: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -132270,12 +131872,103 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (#) + (#) = 2222
-./calc.at:1438:  $PREPARSER ./calc  input
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+input:
 stderr:
+./calc.at:1435: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 20 5 14
+Reducing stack by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 20
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -133294,7 +132987,235 @@
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+  | (# + 1) = 1111
+./calc.at:1437:  $PREPARSER ./calc  input
+stderr:
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1435:  $PREPARSER ./calc  input
+input:
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1438:  $PREPARSER ./calc  input
+  | 1 2
+stderr:
+./calc.at:1440:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 20 5 13
+Reducing stack by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 20
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token number (1.3: 2)
+Stack now 0
+1.2: syntax error: invalid character: '#'
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 20 5 13
+Reducing stack by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 20
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token number (1.3: 2)
+Stack now 0
 ./calc.at:1435: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -133305,6 +133226,58 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1437: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1437: cat stderr
+./calc.at:1438: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1435: cat stderr
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1438: cat stderr
+./calc.at:1440: cat stderr
+  | (1 + # + 1) = 1111
+./calc.at:1437:  $PREPARSER ./calc  input
+stderr:
+input:
+1.6: syntax error: invalid character: '#'
+  | (#) + (#) = 2222
+./calc.at:1438:  $PREPARSER ./calc  input
+input:
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1//2
+./calc.at:1440:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1435:  $PREPARSER ./calc  input
+stderr:
+1.6: syntax error: invalid character: '#'
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -133429,11 +133402,51 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 2
 stderr:
-./calc.at:1440:  $PREPARSER ./calc  input
-./calc.at:1435: cat stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1437: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -133558,6 +133571,10 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
 Starting parse
 Entering state 0
 Stack now 0
@@ -133572,13 +133589,20 @@
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
 Error: popping nterm exp (1.1: 1)
 Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
+Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1437: cat stderr
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -133589,34 +133613,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token number (1.3: 2)
-Stack now 0
-  | (1 + #) = 1111
-./calc.at:1435:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1438: cat stderr
 ./calc.at:1440: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -133627,14 +133623,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.6: syntax error: invalid character: '#'
-input:
-./calc.at:1440: cat stderr
-  | (1 + #) = 1111
-./calc.at:1438:  $PREPARSER ./calc  input
-stderr:
-input:
 ./calc.at:1435: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -133645,6 +133633,32 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+./calc.at:1438: cat stderr
+./calc.at:1440: cat stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1437:  $PREPARSER ./calc  input
+./calc.at:1435: cat stderr
+input:
+stderr:
+  | (1 + #) = 1111
+./calc.at:1438:  $PREPARSER ./calc  input
+1.11-17: error: null divisor
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | error
+stderr:
+./calc.at:1440:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -133744,11 +133758,16 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+input:
 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1//2
 stderr:
-./calc.at:1435: cat stderr
-./calc.at:1440:  $PREPARSER ./calc  input
+stderr:
+1.11-17: error: null divisor
+  | (* *) + (*) + (*)
+./calc.at:1435:  $PREPARSER ./calc  input
+stdout:
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -133848,68 +133867,59 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
+./calc.at:1441: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
-input:
-  | (# + 1) = 1111
-./calc.at:1435:  $PREPARSER ./calc  input
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
-Stack now 0
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1437: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
 stderr:
-1.2: syntax error: invalid character: '#'
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1441:  $PREPARSER ./calc  input
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 ./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -133920,10 +133930,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1438: cat stderr
 stderr:
-input:
 ./calc.at:1440: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -133934,91 +133941,96 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.2: syntax error: invalid character: '#'
-  | (# + 1) = 1111
-./calc.at:1440: cat stderr
-./calc.at:1438:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Stack now 0 8 20
 Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-4: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Stack now 0 8 20 29
 Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.1-4: )
-Stack now 0 4
-Shifting token error (1.1-6: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.13: 7)
+Shifting token number (1.13: 7)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.14-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -134028,2562 +134040,833 @@
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1435: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-stderr:
-  | error
-./calc.at:1440:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token number (2.1: 1)
+Shifting token number (2.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
+Entering state 8
+Stack now 0 6 8
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
+Entering state 20
+Stack now 0 6 8 20
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (2.5: 2)
+Shifting token number (2.5: 2)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
+Entering state 29
+Stack now 0 6 8 20 29
 Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-4: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 21
+Stack now 0 6 8 20 29 21
 Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.1-4: )
-Stack now 0 4
-Shifting token error (1.1-6: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Stack now 0 6 8 20 29 21 2
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
+Next token is token number (2.10: 3)
+Shifting token number (2.10: 3)
+Entering state 1
+Stack now 0 6 8 20 29 21 2 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Stack now 0 6 8 20 29 21 2 10
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (2.12: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 30
+Stack now 0 6 8 20 29 21 30
+Next token is token '=' (2.12: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
+Entering state 29
+Stack now 0 6 8 20 29
+Next token is token '=' (2.12: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
 Entering state 18
-Stack now 0 8 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Stack now 0 6 8 18 2
+Reading a token
+Next token is token number (2.15: 5)
+Shifting token number (2.15: 5)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 18 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Stack now 0 8 18 27
+   $1 = token number (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Stack now 0 6 8 18 2 10
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 27
+Stack now 0 6 8 18 27
+Next token is token '\n' (2.16-3.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
 Entering state 24
-Stack now 0 8 24
+Stack now 0 6 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1435: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1438: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1438: cat stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1435:  $PREPARSER ./calc  input
-stderr:
-input:
-1.6: syntax error: invalid character: '#'
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | (1 + # + 1) = 1111
-./calc.at:1438:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 74):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Stack now 0 6 2
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token number (4.2: 1)
+Shifting token number (4.2: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 6 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
+   $1 = token number (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Stack now 0 6 2 10
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 23
+Stack now 0 6 2 10 23
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (4.4: 2)
+Shifting token number (4.4: 2)
+Entering state 1
+Stack now 0 6 2 10 23 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 32
+Stack now 0 6 2 10 23 32
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+Next token is token '=' (4.6: )
+Reducing stack by rule 12 (line 103):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (4.6: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
 Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Stack now 0 6 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
 Entering state 18
-Stack now 0 8 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Stack now 0 6 8 18 2
+Reading a token
+Next token is token number (4.9: 1)
+Shifting token number (4.9: 1)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 18 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Stack now 0 8 18 27
+   $1 = token number (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Stack now 0 6 8 18 2 10
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 27
+Stack now 0 6 8 18 27
+Next token is token '\n' (4.10-5.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
 Entering state 24
-Stack now 0 8 24
+Stack now 0 6 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1440: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
 Entering state 4
-Stack now 0 4
+Stack now 0 6 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Stack now 0 6 4 2
+Reading a token
+Next token is token number (5.3: 1)
+Shifting token number (5.3: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 6 4 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
+   $1 = token number (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
+Stack now 0 6 4 2 10
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
+Next token is token ')' (5.4: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
+Entering state 12
+Stack now 0 6 4 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 26
+Stack now 0 6 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
+Entering state 8
+Stack now 0 6 8
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 23
+Stack now 0 6 8 23
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (5.6: 2)
+Shifting token number (5.6: 2)
+Entering state 1
+Stack now 0 6 8 23 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 32
+Stack now 0 6 8 23 32
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+Next token is token '=' (5.8: )
+Reducing stack by rule 12 (line 103):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
 Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Stack now 0 6 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
 Entering state 18
-Stack now 0 8 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
+Next token is token number (5.10: 1)
+Shifting token number (5.10: 1)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
+   $1 = token number (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
 Entering state 27
-Stack now 0 8 18 27
+Stack now 0 6 8 18 27
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (5.11-6.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
 Entering state 24
-Stack now 0 8 24
+Stack now 0 6 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1435: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1438: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1435: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1440:  $PREPARSER ./calc  input
-./calc.at:1438: cat stderr
-stderr:
-input:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 74):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
+Stack now 0 6 2
+Reading a token
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Stack now 0 6 2 2
+Reading a token
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Stack now 0 6 2 2 2
+Reading a token
+Next token is token number (7.4: 1)
+Shifting token number (7.4: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 6 2 2 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+   $1 = token number (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Stack now 0 6 2 2 2 10
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Stack now 0 6 2 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
 Entering state 18
-Stack now 0 8 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
+Stack now 0 6 8 18 2
+Reading a token
+Next token is token number (7.9: 1)
+Shifting token number (7.9: 1)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 18 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Stack now 0 6 8 18 2 10
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
 Entering state 27
-Stack now 0 8 18 27
+Stack now 0 6 8 18 27
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 24
+Stack now 0 6 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 18
-Error: popping token '=' (1.3: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 74):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token number (9.1: 1)
+Shifting token number (9.1: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 6 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+   $1 = token number (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 18
-Stack now 0 8 18
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 19
+Stack now 0 6 8 19
 Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
+Next token is token number (9.5: 2)
+Shifting token number (9.5: 2)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 19 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Stack now 0 8 18
-Error: popping token '=' (1.3: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
-Stack now 0
-Cleanup: discarding lookahead token '=' (1.7: )
-Stack now 0
-  | (1 + 1) / (1 - 1)
-./calc.at:1435:  $PREPARSER ./calc  input
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1438:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.11-17: error: null divisor
-./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Stack now 0
+   $1 = token number (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 28
+Stack now 0 6 8 19 28
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '-' (9.7: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Stack now 0 6 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 19
+Stack now 0 6 8 19
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token number (9.9: 3)
+Shifting token number (9.9: 3)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 6 8 19 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
+   $1 = token number (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 28
+Stack now 0 6 8 19 28
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
+Next token is token '=' (9.11: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Stack now 0 6 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
+Entering state 2
+Stack now 0 6 8 18 2
+Reading a token
+Next token is token number (9.14: 4)
+Shifting token number (9.14: 4)
 Entering state 1
-Stack now 0 4 12 20 1
+Stack now 0 6 8 18 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Stack now 0 4 12 20 29
+   $1 = token number (9.14: 4)
+-> $$ = nterm exp (9.14: 4)
+Entering state 10
+Stack now 0 6 8 18 2 10
 Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (9.13: )
+   $2 = nterm exp (9.14: 4)
+-> $$ = nterm exp (9.13-14: -4)
+Entering state 27
+Stack now 0 6 8 18 27
+Next token is token '\n' (9.15-10.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (9.1-9: -4)
+   $2 = token '=' (9.11: )
+   $3 = nterm exp (9.13-14: -4)
+-> $$ = nterm exp (9.1-14: -4)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
+Next token is token '\n' (9.15-10.0: )
+Shifting token '\n' (9.15-10.0: )
+Entering state 24
+Stack now 0 6 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (9.1-14: -4)
+   $2 = token '\n' (9.15-10.0: )
+-> $$ = nterm line (9.1-10.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-9.0: )
+   $2 = nterm line (9.1-10.0: )
+-> $$ = nterm input (1.1-10.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Stack now 0 8 22
+Next token is token number (10.1: 1)
+Shifting token number (10.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (10.1: 1)
+-> $$ = nterm exp (10.1: 1)
+Entering state 8
+Stack now 0 6 8
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
+Next token is token '-' (10.3: )
+Shifting token '-' (10.3: )
+Entering state 19
+Stack now 0 6 8 19
+Reading a token
+Next token is token '(' (10.5: )
+Shifting token '(' (10.5: )
 Entering state 4
-Stack now 0 8 22 4
+Stack now 0 6 8 19 4
 Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
+Next token is token number (10.6: 2)
+Shifting token number (10.6: 2)
 Entering state 1
-Stack now 0 8 22 4 1
+Stack now 0 6 8 19 4 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
+   $1 = token number (10.6: 2)
+-> $$ = nterm exp (10.6: 2)
 Entering state 12
-Stack now 0 8 22 4 12
+Stack now 0 6 8 19 4 12
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
+Next token is token '-' (10.8: )
+Shifting token '-' (10.8: )
 Entering state 19
-Stack now 0 8 22 4 12 19
+Stack now 0 6 8 19 4 12 19
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (10.10: 3)
+Shifting token number (10.10: 3)
 Entering state 1
-Stack now 0 8 22 4 12 19 1
+Stack now 0 6 8 19 4 12 19 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token number (10.10: 3)
+-> $$ = nterm exp (10.10: 3)
 Entering state 28
-Stack now 0 8 22 4 12 19 28
+Stack now 0 6 8 19 4 12 19 28
 Reading a token
-Next token is token ')' (1.17: )
+Next token is token ')' (10.11: )
 Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
+   $1 = nterm exp (10.6: 2)
+   $2 = token '-' (10.8: )
+   $3 = nterm exp (10.10: 3)
+-> $$ = nterm exp (10.6-10: -1)
 Entering state 12
-Stack now 0 8 22 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
+Stack now 0 6 8 19 4 12
+Next token is token ')' (10.11: )
+Shifting token ')' (10.11: )
 Entering state 26
-Stack now 0 8 22 4 12 26
+Stack now 0 6 8 19 4 12 26
 Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Stack now 0 8 22 31
+   $1 = token '(' (10.5: )
+   $2 = nterm exp (10.6-10: -1)
+   $3 = token ')' (10.11: )
+-> $$ = nterm exp (10.5-11: -1)
+Entering state 28
+Stack now 0 6 8 19 28
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Next token is token '=' (10.13: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (10.1: 1)
+   $2 = token '-' (10.3: )
+   $3 = nterm exp (10.5-11: -1)
+-> $$ = nterm exp (10.1-11: 2)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Stack now 0 6 8
+Next token is token '=' (10.13: )
+Shifting token '=' (10.13: )
+Entering state 18
+Stack now 0 6 8 18
+Reading a token
+Next token is token number (10.15: 2)
+Shifting token number (10.15: 2)
+Entering state 1
+Stack now 0 6 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (10.15: 2)
+-> $$ = nterm exp (10.15: 2)
+Entering state 27
+Stack now 0 6 8 18 27
+Reading a token
+Next token is token '\n' (10.16-11.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (10.1-11: 2)
+   $2 = token '=' (10.13: )
+   $3 = nterm exp (10.15: 2)
+-> $$ = nterm exp (10.1-15: 2)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (10.16-11.0: )
+Shifting token '\n' (10.16-11.0: )
 Entering state 24
-Stack now 0 8 24
+Stack now 0 6 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (10.1-15: 2)
+   $2 = token '\n' (10.16-11.0: )
+-> $$ = nterm line (10.1-11.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-10.0: )
+   $2 = nterm line (10.1-11.0: )
+-> $$ = nterm input (1.1-11.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.11-17: error: null divisor
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '\n' (11.1-12.0: )
+Shifting token '\n' (11.1-12.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 74):
+   $1 = token '\n' (11.1-12.0: )
+-> $$ = nterm line (11.1-12.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-11.0: )
+   $2 = nterm line (11.1-12.0: )
+-> $$ = nterm input (1.1-12.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token number (12.1: 2)
+Shifting token number (12.1: 2)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (12.1: 2)
+-> $$ = nterm exp (12.1: 2)
+Entering state 8
+Stack now 0 6 8
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token '^' (12.2: )
+Shifting token '^' (12.2: )
+Entering state 23
+Stack now 0 6 8 23
+Reading a token
+Next token is token number (12.3: 2)
+Shifting token number (12.3: 2)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 6 8 23 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
+   $1 = token number (12.3: 2)
+-> $$ = nterm exp (12.3: 2)
+Entering state 32
+Stack now 0 6 8 23 32
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
+Next token is token '^' (12.4: )
+Shifting token '^' (12.4: )
+Entering state 23
+Stack now 0 6 8 23 32 23
 Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
+Next token is token number (12.5: 3)
+Shifting token number (12.5: 3)
 Entering state 1
-Stack now 0 4 12 20 1
+Stack now 0 6 8 23 32 23 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Stack now 0 4 12 20 29
+   $1 = token number (12.5: 3)
+-> $$ = nterm exp (12.5: 3)
+Entering state 32
+Stack now 0 6 8 23 32 23 32
 Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 103):
+   $1 = nterm exp (12.3: 2)
+   $2 = token '^' (12.4: )
+   $3 = nterm exp (12.5: 3)
+-> $$ = nterm exp (12.3-5: 8)
+Entering state 32
+Stack now 0 6 8 23 32
+Next token is token '=' (12.7: )
+Reducing stack by rule 12 (line 103):
+   $1 = nterm exp (12.1: 2)
+   $2 = token '^' (12.2: )
+   $3 = nterm exp (12.3-5: 8)
+-> $$ = nterm exp (12.1-5: 256)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
+Next token is token '=' (12.7: )
+Shifting token '=' (12.7: )
+Entering state 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Stack now 0 8 22
+Next token is token number (12.9-11: 256)
+Shifting token number (12.9-11: 256)
+Entering state 1
+Stack now 0 6 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (12.9-11: 256)
+-> $$ = nterm exp (12.9-11: 256)
+Entering state 27
+Stack now 0 6 8 18 27
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
+Next token is token '\n' (12.12-13.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (12.1-5: 256)
+   $2 = token '=' (12.7: )
+   $3 = nterm exp (12.9-11: 256)
+-> $$ = nterm exp (12.1-11: 256)
+Entering state 8
+Stack now 0 6 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 24
+Stack now 0 6 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
 Entering state 4
-Stack now 0 8 22 4
+Stack now 0 6 4
 Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
+Next token is token number (13.2: 2)
+Shifting token number (13.2: 2)
 Entering state 1
-Stack now 0 8 22 4 1
+Stack now 0 6 4 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
+   $1 = token number (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
 Entering state 12
-Stack now 0 8 22 4 12
+Stack now 0 6 4 12
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Stack now 0 8 22 4 12 19
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 23
+Stack now 0 6 4 12 23
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (13.4: 2)
+Shifting token number (13.4: 2)
 Entering state 1
-Stack now 0 8 22 4 12 19 1
+Stack now 0 6 4 12 23 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Stack now 0 8 22 4 12 19 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 22 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 22 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Stack now 0 8 22 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1435: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1440: cat stderr
-./calc.at:1438: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1435: cat stderr
-  | 
-  | +1
-./calc.at:1440:  $PREPARSER ./calc  input
-stderr:
-527. calc.at:1435:  ok
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 74):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
-./calc.at:1438: cat stderr
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-529. calc.at:1438: stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Stack now 0 3
-Reducing stack by rule 3 (line 74):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Stack now 0
-Cleanup: discarding lookahead token '+' (2.1: )
-Stack now 0
- ok
-
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1440: cat stderr
-
-./calc.at:1440:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token end of input (1.1: )
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input (1.1: )
-Stack now 0
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token end of input (1.1: )
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input (1.1: )
-Stack now 0
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh"  ...
-./calc.at:1440: cat stderr
-./calc.at:1449: mv calc.y.tmp calc.y
-
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1440:  $PREPARSER ./calc  input
-./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-stderr:
-stdout:
-./calc.at:1441: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh"  ...
-./calc.at:1451: mv calc.y.tmp calc.y
-
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token number (1.7: 1)
-Shifting token number (1.7: 1)
-Entering state 1
-Stack now 0 8 20 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Stack now 0 8 20 4 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 20
-Stack now 0 8 20 4 12 20
-Reading a token
-Next token is token number (1.11: 1)
-Shifting token number (1.11: 1)
-Entering state 1
-Stack now 0 8 20 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 29
-Stack now 0 8 20 4 12 20 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Stack now 0 8 20 4 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Stack now 0 8 20 4 12 20
-Reading a token
-Next token is token number (1.15: 1)
-Shifting token number (1.15: 1)
-Entering state 1
-Stack now 0 8 20 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 29
-Stack now 0 8 20 4 12 20 29
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Stack now 0 8 20 4 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 20
-Stack now 0 8 20 4 12 20
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Stack now 0 8 20 4 12
-Error: popping nterm exp (1.7-15: 3)
-Stack now 0 8 20 4
-Shifting token error (1.7-18: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Error: popping token error (1.23: )
-Stack now 0 8 20 4
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Error: popping token error (1.23: )
-Stack now 0 8 20 4
-Shifting token error (1.23-25: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Error: popping token error (1.23-25: )
-Stack now 0 8 20 4
-Shifting token error (1.23-27: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token number (1.33: 1)
-Shifting token number (1.33: 1)
-Entering state 1
-Stack now 0 8 20 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Stack now 0 8 20 4 12
-Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 21
-Stack now 0 8 20 4 12 21
-Reading a token
-Next token is token number (1.37: 2)
-Shifting token number (1.37: 2)
-Entering state 1
-Stack now 0 8 20 4 12 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 30
-Stack now 0 8 20 4 12 21 30
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Stack now 0 8 20 4 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 21
-Stack now 0 8 20 4 12 21
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Stack now 0 8 20 4 12
-Error: popping nterm exp (1.33-37: 2)
-Stack now 0 8 20 4
-Shifting token error (1.33-41: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Error: popping token error (1.33-41: )
-Stack now 0 8 20 4
-Shifting token error (1.33-41: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.46: 1)
-Shifting token number (1.46: 1)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.46: 1)
--> $$ = nterm exp (1.46: 1)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1441:  $PREPARSER ./calc  input
-./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token number (1.7: 1)
-Shifting token number (1.7: 1)
-Entering state 1
-Stack now 0 8 20 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Stack now 0 8 20 4 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 20
-Stack now 0 8 20 4 12 20
-Reading a token
-Next token is token number (1.11: 1)
-Shifting token number (1.11: 1)
-Entering state 1
-Stack now 0 8 20 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 29
-Stack now 0 8 20 4 12 20 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Stack now 0 8 20 4 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Stack now 0 8 20 4 12 20
-Reading a token
-Next token is token number (1.15: 1)
-Shifting token number (1.15: 1)
-Entering state 1
-Stack now 0 8 20 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 29
-Stack now 0 8 20 4 12 20 29
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Stack now 0 8 20 4 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 20
-Stack now 0 8 20 4 12 20
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Stack now 0 8 20 4 12
-Error: popping nterm exp (1.7-15: 3)
-Stack now 0 8 20 4
-Shifting token error (1.7-18: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Error: popping token error (1.23: )
-Stack now 0 8 20 4
-Shifting token error (1.23: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Error: popping token error (1.23: )
-Stack now 0 8 20 4
-Shifting token error (1.23-25: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Error: popping token error (1.23-25: )
-Stack now 0 8 20 4
-Shifting token error (1.23-27: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token number (1.33: 1)
-Shifting token number (1.33: 1)
-Entering state 1
-Stack now 0 8 20 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Stack now 0 8 20 4 12
-Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 21
-Stack now 0 8 20 4 12 21
-Reading a token
-Next token is token number (1.37: 2)
-Shifting token number (1.37: 2)
-Entering state 1
-Stack now 0 8 20 4 12 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 30
-Stack now 0 8 20 4 12 21 30
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Stack now 0 8 20 4 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 21
-Stack now 0 8 20 4 12 21
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Stack now 0 8 20 4 12
-Error: popping nterm exp (1.33-37: 2)
-Stack now 0 8 20 4
-Shifting token error (1.33-41: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Error: popping token error (1.33-41: )
-Stack now 0 8 20 4
-Shifting token error (1.33-41: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.46: 1)
-Shifting token number (1.46: 1)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.46: 1)
--> $$ = nterm exp (1.46: 1)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1440: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.13: 7)
-Shifting token number (1.13: 7)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (2.1: 1)
-Shifting token number (2.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token number (2.5: 2)
-Shifting token number (2.5: 2)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 21
-Stack now 0 6 8 20 29 21
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Stack now 0 6 8 20 29 21 2
-Reading a token
-Next token is token number (2.10: 3)
-Shifting token number (2.10: 3)
-Entering state 1
-Stack now 0 6 8 20 29 21 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Stack now 0 6 8 20 29 21 2 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 30
-Stack now 0 6 8 20 29 21 30
-Next token is token '=' (2.12: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
-Entering state 29
-Stack now 0 6 8 20 29
-Next token is token '=' (2.12: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Stack now 0 6 8 18 2
-Reading a token
-Next token is token number (2.15: 5)
-Shifting token number (2.15: 5)
-Entering state 1
-Stack now 0 6 8 18 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Stack now 0 6 8 18 2 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 27
-Stack now 0 6 8 18 27
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 74):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token number (4.2: 1)
-Shifting token number (4.2: 1)
-Entering state 1
-Stack now 0 6 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Stack now 0 6 2 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 23
-Stack now 0 6 2 10 23
-Reading a token
-Next token is token number (4.4: 2)
-Shifting token number (4.4: 2)
-Entering state 1
-Stack now 0 6 2 10 23 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 32
-Stack now 0 6 2 10 23 32
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack by rule 12 (line 103):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (4.6: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Stack now 0 6 8 18 2
-Reading a token
-Next token is token number (4.9: 1)
-Shifting token number (4.9: 1)
-Entering state 1
-Stack now 0 6 8 18 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
-Stack now 0 6 8 18 2 10
-Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 27
-Stack now 0 6 8 18 27
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
-Entering state 4
-Stack now 0 6 4
-Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Stack now 0 6 4 2
-Reading a token
-Next token is token number (5.3: 1)
-Shifting token number (5.3: 1)
-Entering state 1
-Stack now 0 6 4 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Stack now 0 6 4 2 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 26
-Stack now 0 6 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 23
-Stack now 0 6 8 23
-Reading a token
-Next token is token number (5.6: 2)
-Shifting token number (5.6: 2)
-Entering state 1
-Stack now 0 6 8 23 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 32
-Stack now 0 6 8 23 32
-Reading a token
-Next token is token '=' (5.8: )
-Reducing stack by rule 12 (line 103):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token number (5.10: 1)
-Shifting token number (5.10: 1)
-Entering state 1
-Stack now 0 6 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 27
-Stack now 0 6 8 18 27
-Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 74):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Stack now 0 6 2 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Stack now 0 6 2 2 2
-Reading a token
-Next token is token number (7.4: 1)
-Shifting token number (7.4: 1)
-Entering state 1
-Stack now 0 6 2 2 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Stack now 0 6 2 2 2 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Stack now 0 6 2 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Stack now 0 6 8 18 2
-Reading a token
-Next token is token number (7.9: 1)
-Shifting token number (7.9: 1)
-Entering state 1
-Stack now 0 6 8 18 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Stack now 0 6 8 18 2 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 27
-Stack now 0 6 8 18 27
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 74):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (9.1: 1)
-Shifting token number (9.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (9.1: 1)
--> $$ = nterm exp (9.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (9.5: 2)
-Shifting token number (9.5: 2)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '-' (9.7: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token number (9.9: 3)
-Shifting token number (9.9: 3)
-Entering state 1
-Stack now 0 6 8 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '=' (9.11: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
-Entering state 2
-Stack now 0 6 8 18 2
-Reading a token
-Next token is token number (9.14: 4)
-Shifting token number (9.14: 4)
-Entering state 1
-Stack now 0 6 8 18 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (9.14: 4)
--> $$ = nterm exp (9.14: 4)
-Entering state 10
-Stack now 0 6 8 18 2 10
-Reading a token
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (9.13: )
-   $2 = nterm exp (9.14: 4)
--> $$ = nterm exp (9.13-14: -4)
-Entering state 27
-Stack now 0 6 8 18 27
-Next token is token '\n' (9.15-10.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (9.1-9: -4)
-   $2 = token '=' (9.11: )
-   $3 = nterm exp (9.13-14: -4)
--> $$ = nterm exp (9.1-14: -4)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (9.15-10.0: )
-Shifting token '\n' (9.15-10.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (9.1-14: -4)
-   $2 = token '\n' (9.15-10.0: )
--> $$ = nterm line (9.1-10.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-9.0: )
-   $2 = nterm line (9.1-10.0: )
--> $$ = nterm input (1.1-10.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (10.1: 1)
-Shifting token number (10.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (10.1: 1)
--> $$ = nterm exp (10.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '-' (10.3: )
-Shifting token '-' (10.3: )
-Entering state 19
-Stack now 0 6 8 19
-Reading a token
-Next token is token '(' (10.5: )
-Shifting token '(' (10.5: )
-Entering state 4
-Stack now 0 6 8 19 4
-Reading a token
-Next token is token number (10.6: 2)
-Shifting token number (10.6: 2)
-Entering state 1
-Stack now 0 6 8 19 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (10.6: 2)
--> $$ = nterm exp (10.6: 2)
-Entering state 12
-Stack now 0 6 8 19 4 12
-Reading a token
-Next token is token '-' (10.8: )
-Shifting token '-' (10.8: )
-Entering state 19
-Stack now 0 6 8 19 4 12 19
-Reading a token
-Next token is token number (10.10: 3)
-Shifting token number (10.10: 3)
-Entering state 1
-Stack now 0 6 8 19 4 12 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (10.10: 3)
--> $$ = nterm exp (10.10: 3)
-Entering state 28
-Stack now 0 6 8 19 4 12 19 28
-Reading a token
-Next token is token ')' (10.11: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (10.6: 2)
-   $2 = token '-' (10.8: )
-   $3 = nterm exp (10.10: 3)
--> $$ = nterm exp (10.6-10: -1)
-Entering state 12
-Stack now 0 6 8 19 4 12
-Next token is token ')' (10.11: )
-Shifting token ')' (10.11: )
-Entering state 26
-Stack now 0 6 8 19 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (10.5: )
-   $2 = nterm exp (10.6-10: -1)
-   $3 = token ')' (10.11: )
--> $$ = nterm exp (10.5-11: -1)
-Entering state 28
-Stack now 0 6 8 19 28
-Reading a token
-Next token is token '=' (10.13: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (10.1: 1)
-   $2 = token '-' (10.3: )
-   $3 = nterm exp (10.5-11: -1)
--> $$ = nterm exp (10.1-11: 2)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (10.13: )
-Shifting token '=' (10.13: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token number (10.15: 2)
-Shifting token number (10.15: 2)
-Entering state 1
-Stack now 0 6 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (10.15: 2)
--> $$ = nterm exp (10.15: 2)
-Entering state 27
-Stack now 0 6 8 18 27
-Reading a token
-Next token is token '\n' (10.16-11.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (10.1-11: 2)
-   $2 = token '=' (10.13: )
-   $3 = nterm exp (10.15: 2)
--> $$ = nterm exp (10.1-15: 2)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (10.16-11.0: )
-Shifting token '\n' (10.16-11.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (10.1-15: 2)
-   $2 = token '\n' (10.16-11.0: )
--> $$ = nterm line (10.1-11.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-10.0: )
-   $2 = nterm line (10.1-11.0: )
--> $$ = nterm input (1.1-11.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (11.1-12.0: )
-Shifting token '\n' (11.1-12.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 74):
-   $1 = token '\n' (11.1-12.0: )
--> $$ = nterm line (11.1-12.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-11.0: )
-   $2 = nterm line (11.1-12.0: )
--> $$ = nterm input (1.1-12.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (12.1: 2)
-Shifting token number (12.1: 2)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (12.1: 2)
--> $$ = nterm exp (12.1: 2)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (12.2: )
-Shifting token '^' (12.2: )
-Entering state 23
-Stack now 0 6 8 23
-Reading a token
-Next token is token number (12.3: 2)
-Shifting token number (12.3: 2)
-Entering state 1
-Stack now 0 6 8 23 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (12.3: 2)
--> $$ = nterm exp (12.3: 2)
-Entering state 32
-Stack now 0 6 8 23 32
-Reading a token
-Next token is token '^' (12.4: )
-Shifting token '^' (12.4: )
-Entering state 23
-Stack now 0 6 8 23 32 23
-Reading a token
-Next token is token number (12.5: 3)
-Shifting token number (12.5: 3)
-Entering state 1
-Stack now 0 6 8 23 32 23 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (12.5: 3)
--> $$ = nterm exp (12.5: 3)
-Entering state 32
-Stack now 0 6 8 23 32 23 32
-Reading a token
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 103):
-   $1 = nterm exp (12.3: 2)
-   $2 = token '^' (12.4: )
-   $3 = nterm exp (12.5: 3)
--> $$ = nterm exp (12.3-5: 8)
-Entering state 32
-Stack now 0 6 8 23 32
-Next token is token '=' (12.7: )
-Reducing stack by rule 12 (line 103):
-   $1 = nterm exp (12.1: 2)
-   $2 = token '^' (12.2: )
-   $3 = nterm exp (12.3-5: 8)
--> $$ = nterm exp (12.1-5: 256)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (12.7: )
-Shifting token '=' (12.7: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token number (12.9-11: 256)
-Shifting token number (12.9-11: 256)
-Entering state 1
-Stack now 0 6 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (12.9-11: 256)
--> $$ = nterm exp (12.9-11: 256)
-Entering state 27
-Stack now 0 6 8 18 27
-Reading a token
-Next token is token '\n' (12.12-13.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (12.1-5: 256)
-   $2 = token '=' (12.7: )
-   $3 = nterm exp (12.9-11: 256)
--> $$ = nterm exp (12.1-11: 256)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
-Entering state 4
-Stack now 0 6 4
-Reading a token
-Next token is token number (13.2: 2)
-Shifting token number (13.2: 2)
-Entering state 1
-Stack now 0 6 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (13.2: 2)
--> $$ = nterm exp (13.2: 2)
-Entering state 12
-Stack now 0 6 4 12
-Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 23
-Stack now 0 6 4 12 23
-Reading a token
-Next token is token number (13.4: 2)
-Shifting token number (13.4: 2)
-Entering state 1
-Stack now 0 6 4 12 23 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 32
-Stack now 0 6 4 12 23 32
+   $1 = token number (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 32
+Stack now 0 6 4 12 23 32
 Reading a token
 Next token is token ')' (13.5: )
 Reducing stack by rule 12 (line 103):
@@ -136676,8 +134959,20 @@
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
+./calc.at:1438: cat stderr
+./calc.at:1437: cat stderr
 stderr:
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1440: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -137696,7 +135991,11 @@
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-  | (!!) + (1 2) = 1
+input:
+  | (# + 1) = 1111
+./calc.at:1438:  $PREPARSER ./calc  input
+input:
+  | 1 = 2 = 3
 ./calc.at:1440:  $PREPARSER ./calc  input
 stderr:
 Starting parse
@@ -137708,121 +136007,77 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Stack now 0 4 5
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 15
-Stack now 0 4 5 15
-Reducing stack by rule 16 (line 107):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.1-2: )
 Stack now 0 4
-Shifting token error (1.2-3: )
+Shifting token error (1.1-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.1-4: )
+Stack now 0 4
+Shifting token error (1.1-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 25
 Stack now 0 4 11 25
 Reducing stack by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+   $2 = token error (1.1-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token number (1.9: 1)
-Shifting token number (1.9: 1)
-Entering state 1
-Stack now 0 8 20 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Stack now 0 8 20 4 12
-Reading a token
-Next token is token number (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Stack now 0 8 20 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token number (1.11: 2)
-Error: discarding token number (1.11: 2)
-Error: popping token error (1.9-11: )
-Stack now 0 8 20 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -137839,9 +136094,14 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+528. calc.at:1437:  ok
+  | 1 2
+./calc.at:1441:  $PREPARSER ./calc  input
+stderr:
+stderr:
+./calc.at:1435: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -137851,121 +136111,77 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Stack now 0 4 5
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 15
-Stack now 0 4 5 15
-Reducing stack by rule 16 (line 107):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.1-2: )
 Stack now 0 4
-Shifting token error (1.2-3: )
+Shifting token error (1.1-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.1-4: )
+Stack now 0 4
+Shifting token error (1.1-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 25
 Stack now 0 4 11 25
 Reducing stack by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+   $2 = token error (1.1-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token number (1.9: 1)
-Shifting token number (1.9: 1)
-Entering state 1
-Stack now 0 8 20 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
-Stack now 0 8 20 4 12
-Reading a token
-Next token is token number (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Stack now 0 8 20 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token number (1.11: 2)
-Error: discarding token number (1.11: 2)
-Error: popping token error (1.9-11: )
-Stack now 0 8 20 4
-Shifting token error (1.9-11: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -137982,20 +136198,6 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-  | 1 2
-./calc.at:1441:  $PREPARSER ./calc  input
-./calc.at:1449: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-stderr:
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -138018,7 +136220,47 @@
 Stack now 0
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1440: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 18
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -138039,7 +136281,48 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
-./calc.at:1441: "$PERL" -pi -e 'use strict;
+stderr:
+
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Stack now 0 8 18
+Error: popping token '=' (1.3: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '=' (1.7: )
+Stack now 0
+./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -138050,13 +136333,74 @@
   }eg
 ' expout || exit 77
 input:
+./calc.at:1441: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1435:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1438: cat stderr
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1441: cat stderr
-  | (- *) + (1 2) = 1
-./calc.at:1440:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1440: cat stderr
+./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
   | 1//2
-stderr:
 ./calc.at:1441:  $PREPARSER ./calc  input
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1438:  $PREPARSER ./calc  input
+stderr:
+input:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Stack now 0 8
+Error: popping nterm exp (1.1: 1)
+Stack now 0
+Cleanup: discarding lookahead token '/' (1.3: )
+Stack now 0
+./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 + !- ++
 Starting parse
 Entering state 0
 Stack now 0
@@ -138066,129 +136410,96 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 106):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 20 4
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
 Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Stack now 0 8 20 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 20 4 12
-Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 20 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
-Stack now 0 8 20 4
-Shifting token error (1.10-12: )
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
 Entering state 11
-Stack now 0 8 20 4 11
+Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
 Entering state 25
-Stack now 0 8 20 4 11 25
+Stack now 0 4 11 25
 Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -138205,9 +136516,12 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
+./calc.at:1435:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1440:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -138235,7 +136549,9 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
-./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -138245,129 +136561,96 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Stack now 0 4 2
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Stack now 0 4 2 9
-Reducing stack by rule 15 (line 106):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2-4: )
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
 Stack now 0 4
-Shifting token error (1.2-4: )
+Shifting token error (1.2-6: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 20 4
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
 Reading a token
 Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Stack now 0 8 20 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Stack now 0 8 20 4 12
-Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Stack now 0 8 20 4
-Shifting token error (1.10-12: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Error: popping token error (1.10-12: )
-Stack now 0 8 20 4
-Shifting token error (1.10-12: )
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
 Entering state 11
-Stack now 0 8 20 4 11
+Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
 Entering state 25
-Stack now 0 8 20 4 11 25
+Stack now 0 4 11 25
 Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -138384,35 +136667,84 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 74):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 22
-Stack now 0 8 22
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Stack now 0
+Cleanup: discarding lookahead token '+' (2.1: )
+Stack now 0
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1441: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
 Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Stack now 0 8
-Error: popping nterm exp (1.1: 1)
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Stack now 0 3
+Reducing stack by rule 3 (line 74):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
 Stack now 0
-Cleanup: discarding lookahead token '/' (1.3: )
+Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
-./calc.at:1451: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+stderr:
+535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none  ...
+./calc.at:1438: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1441: cat stderr
+./calc.at:1448: mv calc.y.tmp calc.y
+
 ./calc.at:1440: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -138423,7 +136755,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1441: "$PERL" -pi -e 'use strict;
+./calc.at:1435: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -138433,15 +136765,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1440: cat stderr
-./calc.at:1441: cat stderr
-input:
+./calc.at:1438: cat stderr
 input:
   | error
-  | (* *) + (*) + (*)
-./calc.at:1440:  $PREPARSER ./calc  input
 ./calc.at:1441:  $PREPARSER ./calc  input
+./calc.at:1440: cat stderr
 stderr:
+./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 Starting parse
 Entering state 0
 Stack now 0
@@ -138450,8 +136780,21 @@
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
-stderr:
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1438:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
+Stack now 0
+./calc.at:1440:  $PREPARSER ./calc  /dev/null
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -138461,123 +136804,113 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 20 4
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 20 4 11
+Stack now 0 4 12 20
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
 Entering state 29
-Stack now 0 8 20 29
+Stack now 0 4 12 20 29
 Reading a token
-Next token is token '+' (1.13: )
+Next token is token ')' (1.7: )
 Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Stack now 0 8 20
 Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
 Entering state 4
-Stack now 0 8 20 4
+Stack now 0 8 22 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 20 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 20 4 11
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 22 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 22 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Stack now 0 8 22 4 12 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 22 4 12 19 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 22 4 12 19 28
 Reading a token
 Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 22 4 12
+Next token is token ')' (1.17: )
 Shifting token ')' (1.17: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
+Entering state 26
+Stack now 0 8 22 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
    $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 29
-Stack now 0 8 20 29
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Stack now 0 8 22 31
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Reducing stack by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
 Stack now 0 8
 Next token is token '\n' (1.18-2.0: )
@@ -138585,7 +136918,7 @@
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 3333)
+   $1 = nterm exp (1.1-17: 2)
    $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
@@ -138603,16 +136936,18 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
-Stack now 0
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1435: cat stderr
+./calc.at:1441: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -138623,123 +136958,113 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Error: popping token error (1.2: )
-Stack now 0 4
-Shifting token error (1.2-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Stack now 0 8
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Error: popping token error (1.10: )
-Stack now 0 8 20 4
-Shifting token error (1.10: )
-Entering state 11
-Stack now 0 8 20 4 11
+Stack now 0 4 12 20
 Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
 Entering state 29
-Stack now 0 8 20 29
+Stack now 0 4 12 20 29
 Reading a token
-Next token is token '+' (1.13: )
+Next token is token ')' (1.7: )
 Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Stack now 0 8 20
 Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
 Entering state 4
-Stack now 0 8 20 4
+Stack now 0 8 22 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Error: popping token error (1.16: )
-Stack now 0 8 20 4
-Shifting token error (1.16: )
-Entering state 11
-Stack now 0 8 20 4 11
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 22 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 22 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Stack now 0 8 22 4 12 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 22 4 12 19 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 22 4 12 19 28
 Reading a token
 Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 22 4 12
+Next token is token ')' (1.17: )
 Shifting token ')' (1.17: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
+Entering state 26
+Stack now 0 8 22 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
    $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 29
-Stack now 0 8 20 29
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Stack now 0 8 22 31
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Reducing stack by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
 Stack now 0 8
 Next token is token '\n' (1.18-2.0: )
@@ -138747,7 +137072,7 @@
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 3333)
+   $1 = nterm exp (1.1-17: 2)
    $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
@@ -138765,17 +137090,12 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1441: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1440: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1441: cat stderr
+  | (#) + (#) = 2222
+./calc.at:1435:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1438: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -138785,13 +137105,36 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1441: cat stderr
-./calc.at:1440: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token end of input (1.1: )
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input (1.1: )
+Stack now 0
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 = 2 = 3
 ./calc.at:1441:  $PREPARSER ./calc  input
-input:
+./calc.at:1438: cat stderr
 stderr:
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token end of input (1.1: )
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input (1.1: )
+Stack now 0
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -138831,9 +137174,8 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-  | 1 + 2 * 3 + !+ ++
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1440:  $PREPARSER ./calc  input
+529. calc.at:1438:  ok
 stderr:
 Starting parse
 Entering state 0
@@ -138874,87 +137216,27 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 20 5 14
-Reducing stack by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 20
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1435: cat stderr
 ./calc.at:1441: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -138965,261 +137247,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Stack now 0 8 20 5 14
-Reducing stack by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Stack now 0 8 20
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1440: cat stderr
 ./calc.at:1441: cat stderr
-./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1440:  $PREPARSER ./calc  input
+  | (1 + #) = 1111
+./calc.at:1435:  $PREPARSER ./calc  input
 stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+./calc.at:1448: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+input:
 input:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 20 5 13
-Reducing stack by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Stack now 0 8 20
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 
   | +1
 ./calc.at:1441:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 20 5 13
-Reducing stack by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Stack now 0 8 20
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1440:  $PREPARSER ./calc  input
 stderr:
+1.6: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -139272,311 +137319,8 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1441: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1440: cat stderr
-./calc.at:1441: cat stderr
-input:
-./calc.at:1441:  $PREPARSER ./calc  /dev/null
-stderr:
-  | (#) + (#) = 2222
-./calc.at:1440:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token end of input (1.1: )
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input (1.1: )
-Stack now 0
-stderr:
-./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1-2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.1-8: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.1-8: )
-Stack now 0 8 20 4
-Shifting token error (1.1-8: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.1-8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token end of input (1.1: )
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input (1.1: )
-Stack now 0
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1-2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.1-8: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.1-8: )
-Stack now 0 8 20 4
-Shifting token error (1.1-8: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.1-8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1441: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1440: "$PERL" -pi -e 'use strict;
+./calc.at:1435: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -139586,216 +137330,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1441: cat stderr
-./calc.at:1440: cat stderr
-input:
-input:
-  | (1 + #) = 1111
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1441:  $PREPARSER ./calc  input
-./calc.at:1440:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -140113,8 +137647,19 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1441: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1441: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -140432,6 +137977,28 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1435: cat stderr
+./calc.at:1441:  $PREPARSER ./calc  /dev/null
+input:
+  | (# + 1) = 1111
+stderr:
+./calc.at:1435:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token end of input (1.1: )
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input (1.1: )
+Stack now 0
+./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh"  ...
+./calc.at:1449: mv calc.y.tmp calc.y
+
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1440: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -140442,6 +138009,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token end of input (1.1: )
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input (1.1: )
+Stack now 0
+./calc.at:1440: cat stderr
+stderr:
+1.2: syntax error: invalid character: '#'
 ./calc.at:1441: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -140452,15 +138031,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1440: cat stderr
-./calc.at:1441: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1440:  $PREPARSER ./calc  input
 input:
   | (!!) + (1 2) = 1
-./calc.at:1441:  $PREPARSER ./calc  input
+./calc.at:1440:  $PREPARSER ./calc  input
+./calc.at:1441: cat stderr
 stderr:
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -140470,77 +138055,121 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Stack now 0 4 5
 Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.1-2: )
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 15
+Stack now 0 4 5 15
+Reducing stack by rule 16 (line 107):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
 Stack now 0 4
-Shifting token error (1.1-4: )
+Shifting token error (1.2-3: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.1-4: )
-Stack now 0 4
-Shifting token error (1.1-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
 Entering state 25
 Stack now 0 4 11 25
 Reducing stack by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.1-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token number (1.9: 1)
+Shifting token number (1.9: 1)
+Entering state 1
+Stack now 0 8 20 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Stack now 0 8 20 4 12
+Reading a token
+Next token is token number (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Stack now 0 8 20 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token number (1.11: 2)
+Error: discarding token number (1.11: 2)
+Error: popping token error (1.9-11: )
+Stack now 0 8 20 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -140558,6 +138187,10 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1435: cat stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1441:  $PREPARSER ./calc  input
 stderr:
 stderr:
 Starting parse
@@ -140700,7 +138333,6 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -140710,77 +138342,955 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token number (1.7: 1)
+Shifting token number (1.7: 1)
+Entering state 1
+Stack now 0 8 20 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Stack now 0 8 20 4 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 20
+Stack now 0 8 20 4 12 20
+Reading a token
+Next token is token number (1.11: 1)
+Shifting token number (1.11: 1)
+Entering state 1
+Stack now 0 8 20 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 29
+Stack now 0 8 20 4 12 20 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Stack now 0 8 20 4 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Stack now 0 8 20 4 12 20
+Reading a token
+Next token is token number (1.15: 1)
+Shifting token number (1.15: 1)
+Entering state 1
+Stack now 0 8 20 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 29
+Stack now 0 8 20 4 12 20 29
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Stack now 0 8 20 4 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 20
+Stack now 0 8 20 4 12 20
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Stack now 0 8 20 4 12
+Error: popping nterm exp (1.7-15: 3)
+Stack now 0 8 20 4
+Shifting token error (1.7-18: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Error: popping token error (1.23: )
+Stack now 0 8 20 4
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Error: popping token error (1.23: )
+Stack now 0 8 20 4
+Shifting token error (1.23-25: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Error: popping token error (1.23-25: )
+Stack now 0 8 20 4
+Shifting token error (1.23-27: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token number (1.33: 1)
+Shifting token number (1.33: 1)
+Entering state 1
+Stack now 0 8 20 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Stack now 0 8 20 4 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 21
+Stack now 0 8 20 4 12 21
+Reading a token
+Next token is token number (1.37: 2)
+Shifting token number (1.37: 2)
+Entering state 1
+Stack now 0 8 20 4 12 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 30
+Stack now 0 8 20 4 12 21 30
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Stack now 0 8 20 4 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 21
+Stack now 0 8 20 4 12 21
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Stack now 0 8 20 4 12
+Error: popping nterm exp (1.33-37: 2)
+Stack now 0 8 20 4
+Shifting token error (1.33-41: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Error: popping token error (1.33-41: )
+Stack now 0 8 20 4
+Shifting token error (1.33-41: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.46: 1)
+Shifting token number (1.46: 1)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1449: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Stack now 0 4
-Shifting token error (1.1-2: )
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Stack now 0 8
 Reading a token
 Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.1-2: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token number (1.7: 1)
+Shifting token number (1.7: 1)
+Entering state 1
+Stack now 0 8 20 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Stack now 0 8 20 4 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 20
+Stack now 0 8 20 4 12 20
+Reading a token
+Next token is token number (1.11: 1)
+Shifting token number (1.11: 1)
+Entering state 1
+Stack now 0 8 20 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 29
+Stack now 0 8 20 4 12 20 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Stack now 0 8 20 4 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Stack now 0 8 20 4 12 20
+Reading a token
+Next token is token number (1.15: 1)
+Shifting token number (1.15: 1)
+Entering state 1
+Stack now 0 8 20 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 29
+Stack now 0 8 20 4 12 20 29
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Stack now 0 8 20 4 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 20
+Stack now 0 8 20 4 12 20
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Stack now 0 8 20 4 12
+Error: popping nterm exp (1.7-15: 3)
+Stack now 0 8 20 4
+Shifting token error (1.7-18: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Error: popping token error (1.23: )
+Stack now 0 8 20 4
+Shifting token error (1.23: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Error: popping token error (1.23: )
+Stack now 0 8 20 4
+Shifting token error (1.23-25: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Error: popping token error (1.23-25: )
+Stack now 0 8 20 4
+Shifting token error (1.23-27: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token number (1.33: 1)
+Shifting token number (1.33: 1)
+Entering state 1
+Stack now 0 8 20 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Stack now 0 8 20 4 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 21
+Stack now 0 8 20 4 12 21
+Reading a token
+Next token is token number (1.37: 2)
+Shifting token number (1.37: 2)
+Entering state 1
+Stack now 0 8 20 4 12 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 30
+Stack now 0 8 20 4 12 21 30
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Stack now 0 8 20 4 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 21
+Stack now 0 8 20 4 12 21
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Stack now 0 8 20 4 12
+Error: popping nterm exp (1.33-37: 2)
+Stack now 0 8 20 4
+Shifting token error (1.33-41: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Error: popping token error (1.33-41: )
+Stack now 0 8 20 4
+Shifting token error (1.33-41: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.46: 1)
+Shifting token number (1.46: 1)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1435:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1441: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.6: syntax error: invalid character: '#'
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1441: cat stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1440: cat stderr
+input:
+input:
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (- *) + (1 2) = 1
+./calc.at:1440:  $PREPARSER ./calc  input
+stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1441:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Stack now 0 4
-Shifting token error (1.1-4: )
+Reading a token
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 106):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.1-4: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Stack now 0 8 20 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Stack now 0 8 20 4 12
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Stack now 0 8 20 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
+Stack now 0 8 20 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
 Stack now 0 4
-Shifting token error (1.1-6: )
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Stack now 0 4 5
+Reading a token
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 15
+Stack now 0 4 5 15
+Reducing stack by rule 16 (line 107):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Stack now 0 4
+Shifting token error (1.2-3: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
 Entering state 25
 Stack now 0 4 11 25
 Reducing stack by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.1-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token number (1.9: 1)
+Shifting token number (1.9: 1)
+Entering state 1
+Stack now 0 8 20 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Stack now 0 8 20 4 12
+Reading a token
+Next token is token number (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Stack now 0 8 20 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token number (1.11: 2)
+Error: discarding token number (1.11: 2)
+Error: popping token error (1.9-11: )
+Stack now 0 8 20 4
+Shifting token error (1.9-11: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -140797,6 +139307,9 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1435: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -140807,6 +139320,154 @@
 Entering state 4
 Stack now 0 4
 Reading a token
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
+Stack now 0 4 2
+Reading a token
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Stack now 0 4 2 9
+Reducing stack by rule 15 (line 106):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2-4: )
+Stack now 0 4
+Shifting token error (1.2-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Stack now 0 8 20 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Stack now 0 8 20 4 12
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Stack now 0 8 20 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Error: popping token error (1.10-12: )
+Stack now 0 8 20 4
+Shifting token error (1.10-12: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
 Next token is token '!' (1.2: )
 Shifting token '!' (1.2: )
 Entering state 5
@@ -140948,7 +139609,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1440: cat stderr
+input:
+  | (1 + 1) / (1 - 1)
 ./calc.at:1441: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -140959,13 +139621,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1441: cat stderr
+./calc.at:1435:  $PREPARSER ./calc  input
+./calc.at:1440: cat stderr
+stderr:
 input:
-  | (1 + # + 1) = 1111
+1.11-17: error: null divisor
+  | (* *) + (*) + (*)
+./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1440:  $PREPARSER ./calc  input
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1441:  $PREPARSER ./calc  input
+./calc.at:1441: cat stderr
+stderr:
+1.11-17: error: null divisor
 stderr:
 Starting parse
 Entering state 0
@@ -140976,96 +139642,132 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
 Stack now 0 4
-Shifting token error (1.2-6: )
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2: )
 Stack now 0 4
-Shifting token error (1.2-8: )
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 20 4 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
+Stack now 0 8 20 4
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 20 4 11
 Reading a token
 Next token is token ')' (1.11: )
 Shifting token ')' (1.11: )
 Entering state 25
-Stack now 0 4 11 25
+Stack now 0 8 20 4 11 25
 Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
    $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Stack now 0 8 20
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Stack now 0 8 18
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Stack now 0 8 20 4
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Stack now 0 8 18 27
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 20 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 20 4 11
 Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -141083,6 +139785,7 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
 stderr:
 Starting parse
@@ -141094,96 +139797,132 @@
 Entering state 4
 Stack now 0 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Error: popping token error (1.2: )
 Stack now 0 4
-Shifting token error (1.2-6: )
+Shifting token error (1.2: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Error: popping token error (1.2: )
 Stack now 0 4
-Shifting token error (1.2-8: )
+Shifting token error (1.2-4: )
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
 Entering state 11
-Stack now 0 4 11
+Stack now 0 8 20 4 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Error: popping token error (1.10: )
+Stack now 0 8 20 4
+Shifting token error (1.10: )
+Entering state 11
+Stack now 0 8 20 4 11
 Reading a token
 Next token is token ')' (1.11: )
 Shifting token ')' (1.11: )
 Entering state 25
-Stack now 0 4 11 25
+Stack now 0 8 20 4 11 25
 Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
    $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Stack now 0 8 20
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Stack now 0 8 18
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Stack now 0 8 20 4
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Stack now 0 8 18 27
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Error: popping token error (1.16: )
+Stack now 0 8 20 4
+Shifting token error (1.16: )
+Entering state 11
+Stack now 0 8 20 4 11
 Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -141200,6 +139939,44 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1435: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (- *) + (1 2) = 1
+./calc.at:1441:  $PREPARSER ./calc  input
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1440: cat stderr
+stdout:
+./calc.at:1435: cat stderr
+./calc.at:1445: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
+input:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -141349,20 +140126,10 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+527. calc.at:1435:  ok
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1440:  $PREPARSER ./calc  input
 stderr:
-stderr:
-stdout:
-./calc.at:1445: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
 Starting parse
 Entering state 0
 Stack now 0
@@ -141511,27 +140278,171 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 20 5 14
+Reducing stack by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 20
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-./calc.at:1441: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Stack now 0 8 20 5 14
+Reducing stack by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Stack now 0 8 20
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -141546,15 +140457,19 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1445:  $PREPARSER ./calc  input
-./calc.at:1440: cat stderr
-./calc.at:1441: cat stderr
-input:
-input:
-  | (* *) + (*) + (*)
-  | (1 + 1) / (1 - 1)
-./calc.at:1440:  $PREPARSER ./calc  input
-./calc.at:1441:  $PREPARSER ./calc  input
+./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1441: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -142572,152 +141487,13 @@
 Stack now 0 6 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-stderr:
+./calc.at:1441: cat stderr
 ./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Stack now 0 4 12 20 29
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 22 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 22 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 22 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Stack now 0 8 22 4 12 19
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 22 4 12 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Stack now 0 8 22 4 12 19 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 22 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 22 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Stack now 0 8 22 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1440:  $PREPARSER ./calc  input
 stderr:
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -143736,6 +142512,91 @@
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
 ./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+  | (* *) + (*) + (*)
+./calc.at:1441:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 20 5 13
+Reducing stack by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 20
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -143887,150 +142748,103 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 20
-Stack now 0 4 12 20
+Stack now 0 8 20
 Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
-Stack now 0 4 12 20 1
+Stack now 0 8 20 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 29
-Stack now 0 4 12 20 29
+Stack now 0 8 20 29
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' (1.11: )
 Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Stack now 0 8 20
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 22 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 22 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 22 4 12
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 20 5
 Reading a token
 Next token is token '-' (1.14: )
 Shifting token '-' (1.14: )
-Entering state 19
-Stack now 0 8 22 4 12 19
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 22 4 12 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Stack now 0 8 22 4 12 19 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
+Entering state 13
+Stack now 0 8 20 5 13
+Reducing stack by rule 18 (line 109):
+   $1 = token '!' (1.13: )
    $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 22 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 22 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Stack now 0 8 22 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Stack now 0 8 20
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1445:  $PREPARSER ./calc  input
 stderr:
+stderr:
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh"  ...
 Starting parse
 Entering state 0
 Stack now 0
@@ -144182,20 +142996,9 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | 1 2
-./calc.at:1445:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1440: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1451: mv calc.y.tmp calc.y
+
+./calc.at:1440: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -144216,6 +143019,7 @@
 Stack now 0
 Cleanup: discarding lookahead token number (2)
 Stack now 0
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1441: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -144226,12 +143030,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1440: cat stderr
-./calc.at:1441: cat stderr
-530. calc.at:1440:  ok
-stderr:
 input:
+  | (#) + (#) = 2222
+./calc.at:1440:  $PREPARSER ./calc  input
+./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -144252,10 +143055,270 @@
 Stack now 0
 Cleanup: discarding lookahead token number (2)
 Stack now 0
-
+./calc.at:1441: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1-2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.1-8: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.1-8: )
+Stack now 0 8 20 4
+Shifting token error (1.1-8: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.1-8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1441:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1-2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.1-8: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.1-8: )
+Stack now 0 8 20 4
+Shifting token error (1.1-8: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.1-8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -144337,6 +143400,17 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1445: cat stderr
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -144419,24 +143493,74 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1445: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1445: cat stderr
+input:
+./calc.at:1440: cat stderr
+  | 1//2
+./calc.at:1445:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Stack now 0 8
+Error: popping nterm exp (1)
+Stack now 0
+Cleanup: discarding lookahead token '/' ()
+Stack now 0
 input:
   | 1 + 2 * 3 + !- ++
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1441:  $PREPARSER ./calc  input
 input:
 stderr:
-  | 1//2
-./calc.at:1445:  $PREPARSER ./calc  input
+stderr:
+  | (1 + #) = 1111
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Stack now 0 8
+Error: popping nterm exp (1)
+Stack now 0
+Cleanup: discarding lookahead token '/' ()
+Stack now 0
+./calc.at:1440:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -144523,31 +143647,214 @@
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 4 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Stack now 0 8 22
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
 Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
 Stack now 0 8
-Error: popping nterm exp (1)
-Stack now 0
-Cleanup: discarding lookahead token '/' ()
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1451: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
 Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -144628,35 +143935,7 @@
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
-Stack now 0 8
-Error: popping nterm exp (1)
-Stack now 0
-Cleanup: discarding lookahead token '/' ()
-Stack now 0
-./calc.at:1441: "$PERL" -pi -e 'use strict;
+./calc.at:1440: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -144666,8 +143945,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose  ...
-./calc.at:1445: "$PERL" -pi -e 'use strict;
+./calc.at:1445: cat stderr
+./calc.at:1440: cat stderr
+./calc.at:1441: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -144678,16 +143958,221 @@
   }eg
 ' expout || exit 77
 ./calc.at:1441: cat stderr
-./calc.at:1453: mv calc.y.tmp calc.y
-
-./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1445: cat stderr
 input:
+input:
+  | (# + 1) = 1111
+./calc.at:1440:  $PREPARSER ./calc  input
   | error
 ./calc.at:1445:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
+Stack now 0
 input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.1-4: )
+Stack now 0 4
+Shifting token error (1.1-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (#) + (#) = 2222
 ./calc.at:1441:  $PREPARSER ./calc  input
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.1-4: )
+Stack now 0 4
+Shifting token error (1.1-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -144697,8 +144182,6 @@
 syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token ()
 Stack now 0
-stderr:
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -144823,16 +144306,17 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-Stack now 0
 Starting parse
 Entering state 0
 Stack now 0
@@ -144956,7 +144440,8 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1441: "$PERL" -pi -e 'use strict;
+./calc.at:1440: cat stderr
+./calc.at:1445: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -144966,8 +144451,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1441: cat stderr
-./calc.at:1445: "$PERL" -pi -e 'use strict;
+./calc.at:1441: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -144977,14 +144461,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1445: cat stderr
 input:
-  | (1 + #) = 1111
-./calc.at:1441:  $PREPARSER ./calc  input
+./calc.at:1445: cat stderr
+  | (1 + # + 1) = 1111
+./calc.at:1440:  $PREPARSER ./calc  input
+./calc.at:1441: cat stderr
 input:
 stderr:
-  | 1 = 2 = 3
-./calc.at:1445:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -145026,48 +144509,64 @@
 Entering state 11
 Stack now 0 4 11
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
 Entering state 25
 Stack now 0 4 11 25
 Reducing stack by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.19-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -145084,9 +144583,12 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1445:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1453: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -145126,9 +144628,136 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' ()
 Stack now 0
-stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+  | (1 + #) = 1111
+./calc.at:1441:  $PREPARSER ./calc  input
 ./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -145228,6 +144857,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -145267,7 +144897,109 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' ()
 Stack now 0
-./calc.at:1441: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1440: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+./calc.at:1445: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -145277,7 +145009,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1445: "$PERL" -pi -e 'use strict;
+  | (1 + 1) / (1 - 1)
+./calc.at:1440:  $PREPARSER ./calc  input
+./calc.at:1441: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -145287,16 +145021,302 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Stack now 0 4 12 20 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 22 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 22 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 22 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Stack now 0 8 22 4 12 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 22 4 12 19 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 22 4 12 19 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 22 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 22 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Stack now 0 8 22 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1441: cat stderr
 ./calc.at:1445: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Stack now 0 4 12 20 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 22 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 22 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 22 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Stack now 0 8 22 4 12 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 22 4 12 19 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 22 4 12 19 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 22 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 22 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Stack now 0 8 22 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 input:
-  | (# + 1) = 1111
 input:
+  | (# + 1) = 1111
 ./calc.at:1441:  $PREPARSER ./calc  input
-stderr:
   | 
   | +1
 ./calc.at:1445:  $PREPARSER ./calc  input
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -145394,8 +145414,16 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
+./calc.at:1440: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -145421,6 +145449,8 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' ()
 Stack now 0
+stderr:
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -145517,8 +145547,8 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1440: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -145544,6 +145574,7 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' ()
 Stack now 0
+530. calc.at:1440:  ok
 ./calc.at:1441: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -145554,10 +145585,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+
 ./calc.at:1441: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1441:  $PREPARSER ./calc  input
 ./calc.at:1445: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -145568,6 +145597,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1441:  $PREPARSER ./calc  input
+./calc.at:1445: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -145685,12 +145718,22 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1445: cat stderr
+./calc.at:1445:  $PREPARSER ./calc  /dev/null
+stderr:
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
+Next token is token end of input ()
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input ()
+Stack now 0
+538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose  ...
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
@@ -145802,7 +145845,10 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1445:  $PREPARSER ./calc  /dev/null
+./calc.at:1453: mv calc.y.tmp calc.y
+
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 stderr:
 Starting parse
 Entering state 0
@@ -145822,21 +145868,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1441: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token end of input ()
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input ()
-Stack now 0
+./calc.at:1445: cat stderr
 input:
   | (1 + 1) / (1 - 1)
 ./calc.at:1441:  $PREPARSER ./calc  input
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -145979,17 +146027,9 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1445: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1445:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -146131,22 +146171,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1445: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1445:  $PREPARSER ./calc  input
-./calc.at:1441: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
-./calc.at:1441: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -146464,8 +146489,19 @@
 Stack now 0 6 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
+./calc.at:1441: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-531. calc.at:1441:  ok
+./calc.at:1453: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1441: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -146784,7 +146820,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-
+531. calc.at:1441:  ok
 ./calc.at:1445: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -146795,6 +146831,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+
 ./calc.at:1445: cat stderr
 input:
   | (!!) + (1 2) = 1
@@ -147085,6 +147122,7 @@
 539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed  ...
 ./calc.at:1454: mv calc.y.tmp calc.y
 
+./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 ./calc.at:1445: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -147095,7 +147133,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 ./calc.at:1445: cat stderr
 input:
   | (- *) + (1 2) = 1
@@ -147399,6 +147436,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
+./calc.at:1454: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 ./calc.at:1445: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -147413,7 +147451,6 @@
 input:
   | (* *) + (*) + (*)
 ./calc.at:1445:  $PREPARSER ./calc  input
-./calc.at:1454: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
 Starting parse
 Entering state 0
@@ -147719,6 +147756,19 @@
 Stack now 0 6 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
+stderr:
+stdout:
+./calc.at:1443: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
 ./calc.at:1445: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -147729,1806 +147779,209 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1445: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1445:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1443:  $PREPARSER ./calc  input
+./calc.at:1445: cat stderr
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Stack now 0 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
 Entering state 20
 Stack now 0 8 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
 Entering state 1
 Stack now 0 8 20 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (2)
--> $$ = nterm exp (2)
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
 Entering state 29
 Stack now 0 8 20 29
 Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
 Entering state 21
 Stack now 0 8 20 29 21
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
 Entering state 1
 Stack now 0 8 20 29 21 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (3)
--> $$ = nterm exp (3)
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
 Entering state 30
 Stack now 0 8 20 29 21 30
 Reading a token
-Next token is token '+' ()
+Next token is token '=' (1.11: )
 Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
 Entering state 29
 Stack now 0 8 20 29
-Next token is token '+' ()
+Next token is token '=' (1.11: )
 Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 8 20
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Stack now 0 8 18
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Stack now 0 8 20 5
+Next token is token number (1.13: 7)
+Shifting token number (1.13: 7)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
+Entering state 27
+Stack now 0 8 18 27
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Stack now 0 8 20 5 14
-Reducing stack by rule 17 (line 108):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Stack now 0 8 20
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '\n' (1.14-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token number (2.1: 1)
+Shifting token number (2.1: 1)
 Entering state 1
-Stack now 0 1
+Stack now 0 6 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token number (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
 Entering state 20
-Stack now 0 8 20
+Stack now 0 6 8 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token number (2.5: 2)
+Shifting token number (2.5: 2)
 Entering state 1
-Stack now 0 8 20 1
+Stack now 0 6 8 20 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (2)
--> $$ = nterm exp (2)
+   $1 = token number (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
 Entering state 29
-Stack now 0 8 20 29
+Stack now 0 6 8 20 29
 Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
 Entering state 21
-Stack now 0 8 20 29 21
+Stack now 0 6 8 20 29 21
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Stack now 0 6 8 20 29 21 2
+Reading a token
+Next token is token number (2.10: 3)
+Shifting token number (2.10: 3)
 Entering state 1
-Stack now 0 8 20 29 21 1
+Stack now 0 6 8 20 29 21 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Stack now 0 8 20 29 21 30
+   $1 = token number (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Stack now 0 6 8 20 29 21 2 10
 Reading a token
-Next token is token '+' ()
+Next token is token '=' (2.12: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 30
+Stack now 0 6 8 20 29 21 30
+Next token is token '=' (2.12: )
 Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
 Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' ()
+Stack now 0 6 8 20 29
+Next token is token '=' (2.12: )
 Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
 Entering state 8
-Stack now 0 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Stack now 0 8 20 5
+Stack now 0 6 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Stack now 0 8 20 5 14
-Reducing stack by rule 17 (line 108):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Stack now 0 8 20
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1445:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Stack now 0 6 8 18 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token number (2.15: 5)
+Shifting token number (2.15: 5)
 Entering state 1
-Stack now 0 1
+Stack now 0 6 8 18 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' ()
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' ()
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Stack now 0 8 20 5 13
-Reducing stack by rule 18 (line 109):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Stack now 0 8 20
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '+' ()
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '+' ()
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Stack now 0 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Stack now 0 8 20 5 13
-Reducing stack by rule 18 (line 109):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Stack now 0 8 20
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1445: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1445: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1445:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 8 20 4
-Shifting token error ()
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' ()
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (2222)
-Shifting token number (2222)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 8 20 4
-Shifting token error ()
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' ()
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Stack now 0 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (2222)
-Shifting token number (2222)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1445: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1445:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Stack now 0 4 12
-Error: popping nterm exp (1)
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Stack now 0 4 12
-Error: popping nterm exp (1)
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1445: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1445:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1445: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1445:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Stack now 0 4 12
-Error: popping nterm exp (1)
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Stack now 0 4 12
-Error: popping nterm exp (1)
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token ()
-Error: discarding token invalid token ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Error: popping token error ()
-Stack now 0 4
-Shifting token error ()
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1445: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1445:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Stack now 0 4 12 20 29
-Reading a token
-Next token is token ')' ()
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Stack now 0 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 8 22 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 8 22 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Stack now 0 8 22 4 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Stack now 0 8 22 4 12 19
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 8 22 4 12 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 28
-Stack now 0 8 22 4 12 19 28
-Reading a token
-Next token is token ')' ()
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Stack now 0 8 22 4 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Stack now 0 8 22 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Stack now 0 8 22 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Stack now 0 4 12 20 29
-Reading a token
-Next token is token ')' ()
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Stack now 0 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Stack now 0 8 22 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 8 22 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Stack now 0 8 22 4 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Stack now 0 8 22 4 12 19
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Stack now 0 8 22 4 12 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 28
-Stack now 0 8 22 4 12 19 28
-Reading a token
-Next token is token ')' ()
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Stack now 0 8 22 4 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Stack now 0 8 22 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Stack now 0 8 22 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input ()
-Shifting token end of input ()
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1445: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1445: cat stderr
-533. calc.at:1445:  ok
-
-540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace  ...
-./calc.at:1455: mv calc.y.tmp calc.y
-
-./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1455: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-stderr:
-stdout:
-./calc.at:1443: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1443:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.13: 7)
-Shifting token number (1.13: 7)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (2.1: 1)
-Shifting token number (2.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token number (2.5: 2)
-Shifting token number (2.5: 2)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 21
-Stack now 0 6 8 20 29 21
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Stack now 0 6 8 20 29 21 2
-Reading a token
-Next token is token number (2.10: 3)
-Shifting token number (2.10: 3)
-Entering state 1
-Stack now 0 6 8 20 29 21 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Stack now 0 6 8 20 29 21 2 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 30
-Stack now 0 6 8 20 29 21 30
-Next token is token '=' (2.12: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
-Entering state 29
-Stack now 0 6 8 20 29
-Next token is token '=' (2.12: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Stack now 0 6 8 18 2
-Reading a token
-Next token is token number (2.15: 5)
-Shifting token number (2.15: 5)
-Entering state 1
-Stack now 0 6 8 18 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Stack now 0 6 8 18 2 10
+   $1 = token number (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Stack now 0 6 8 18 2 10
 Reading a token
 Next token is token '\n' (2.16-3.0: )
 Reducing stack by rule 11 (line 102):
@@ -150360,7 +148813,11 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
+input:
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1445:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -151380,8 +149837,171 @@
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' ()
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' ()
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Stack now 0 8 20 5 14
+Reducing stack by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Stack now 0 8 20
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' ()
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' ()
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Stack now 0 8 20 5 14
+Reducing stack by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Stack now 0 8 20
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
   | 1 2
+./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1443:  $PREPARSER ./calc  input
 stderr:
 Starting parse
@@ -151404,8 +150024,92 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1445:  $PREPARSER ./calc  input
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' ()
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' ()
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Stack now 0 8 20 5 13
+Reducing stack by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Stack now 0 8 20
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
 Starting parse
 Entering state 0
 Stack now 0
@@ -151426,6 +150130,8 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -151436,10 +150142,100 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' ()
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' ()
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Stack now 0 8 20 5 13
+Reducing stack by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Stack now 0 8 20
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
 ./calc.at:1443: cat stderr
 input:
   | 1//2
 ./calc.at:1443:  $PREPARSER ./calc  input
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -151468,6 +150264,7 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
+./calc.at:1445: cat stderr
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -151497,6 +150294,9 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
+input:
+  | (#) + (#) = 2222
+./calc.at:1445:  $PREPARSER ./calc  input
 ./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -151507,7 +150307,256 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 ./calc.at:1443: cat stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 8 20 4
+Shifting token error ()
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' ()
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (2222)
+Shifting token number (2222)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2222)
+-> $$ = nterm exp (2222)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 8 20 4
+Shifting token error ()
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' ()
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (2222)
+Shifting token number (2222)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2222)
+-> $$ = nterm exp (2222)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
 input:
   | error
 ./calc.at:1443:  $PREPARSER ./calc  input
@@ -151521,6 +150570,16 @@
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -151530,6 +150589,7 @@
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
+./calc.at:1445: cat stderr
 ./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -151542,9 +150602,114 @@
 ' expout || exit 77
 ./calc.at:1443: cat stderr
 input:
+  | (1 + #) = 1111
+./calc.at:1445:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Stack now 0 4 12
+Error: popping nterm exp (1)
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+input:
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 = 2 = 3
 ./calc.at:1443:  $PREPARSER ./calc  input
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -151584,6 +150749,105 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Stack now 0 4 12
+Error: popping nterm exp (1)
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -151625,6 +150889,17 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1445: cat stderr
 ./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -151635,12 +150910,220 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (# + 1) = 1111
+./calc.at:1445:  $PREPARSER ./calc  input
 ./calc.at:1443: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
 input:
   | 
   | +1
 ./calc.at:1443:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -151667,6 +151150,7 @@
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1445: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -151703,6 +151187,242 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1445:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Stack now 0 4 12
+Error: popping nterm exp (1)
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Stack now 0 4 12
+Error: popping nterm exp (1)
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token ()
+Error: discarding token invalid token ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Error: popping token error ()
+Stack now 0 4
+Shifting token error ()
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
 ./calc.at:1443: cat stderr
 ./calc.at:1443:  $PREPARSER ./calc  /dev/null
 stderr:
@@ -151715,6 +151435,16 @@
 Cleanup: discarding lookahead token end of input (1.1: )
 Stack now 0
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -151724,6 +151454,7 @@
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
 Stack now 0
+./calc.at:1445: cat stderr
 ./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -151734,10 +151465,298 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1445:  $PREPARSER ./calc  input
 ./calc.at:1443: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Stack now 0 4 12 20 29
+Reading a token
+Next token is token ')' ()
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Stack now 0 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 8 22 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 8 22 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Stack now 0 8 22 4 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Stack now 0 8 22 4 12 19
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 8 22 4 12 19 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Stack now 0 8 22 4 12 19 28
+Reading a token
+Next token is token ')' ()
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Stack now 0 8 22 4 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Stack now 0 8 22 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Stack now 0 8 22 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1443:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Stack now 0 4 12 20 29
+Reading a token
+Next token is token ')' ()
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Stack now 0 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Stack now 0 8 22 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 8 22 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Stack now 0 8 22 4 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Stack now 0 8 22 4 12 19
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Stack now 0 8 22 4 12 19 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Stack now 0 8 22 4 12 19 28
+Reading a token
+Next token is token ')' ()
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Stack now 0 8 22 4 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Stack now 0 8 22 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Stack now 0 8 22 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input ()
+Shifting token end of input ()
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
 stderr:
 Starting parse
 Entering state 0
@@ -152058,6 +152077,16 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1445: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -152375,6 +152404,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1445: cat stderr
 ./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -152386,6 +152416,7 @@
   }eg
 ' expout || exit 77
 ./calc.at:1443: cat stderr
+533. calc.at:1445:  ok
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1443:  $PREPARSER ./calc  input
@@ -152530,6 +152561,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -152683,6 +152715,10 @@
   }eg
 ' expout || exit 77
 ./calc.at:1443: cat stderr
+540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace  ...
+./calc.at:1455: mv calc.y.tmp calc.y
+
+./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1443:  $PREPARSER ./calc  input
@@ -152995,6 +153031,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1455: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 ./calc.at:1443: cat stderr
 input:
   | (* *) + (*) + (*)
@@ -153482,10 +153519,24 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+stdout:
+./calc.at:1446: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
 input:
   | 1 + 2 * 3 + !- ++
 ./calc.at:1443:  $PREPARSER ./calc  input
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -153566,8 +153617,23 @@
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1446:  $PREPARSER ./calc  input
 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -153612,7 +153678,7 @@
 Entering state 30
 Stack now 0 8 20 29 21 30
 Reading a token
-Next token is token '+' (1.11: )
+Next token is token '=' (1.11: )
 Reducing stack by rule 9 (line 92):
    $1 = nterm exp (1.5: 2)
    $2 = token '*' (1.7: )
@@ -153620,7 +153686,7 @@
 -> $$ = nterm exp (1.5-9: 6)
 Entering state 29
 Stack now 0 8 20 29
-Next token is token '+' (1.11: )
+Next token is token '=' (1.11: )
 Reducing stack by rule 7 (line 90):
    $1 = nterm exp (1.1: 1)
    $2 = token '+' (1.3: )
@@ -153628,148 +153694,36 @@
 -> $$ = nterm exp (1.1-9: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Stack now 0 8 20 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Stack now 0 8 20 5 13
-Reducing stack by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Stack now 0 8 20
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1443: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1443: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1443:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1-2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 20 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.1-8: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.1-8: )
-Stack now 0 8 20 4
-Shifting token error (1.1-8: )
-Entering state 11
-Stack now 0 8 20 4 11
-Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.1-8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Stack now 0 8
 Next token is token '=' (1.11: )
 Shifting token '=' (1.11: )
 Entering state 18
 Stack now 0 8 18
 Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
+Next token is token number (1.13: 7)
+Shifting token number (1.13: 7)
 Entering state 1
 Stack now 0 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
+   $1 = token number (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
 Entering state 27
 Stack now 0 8 18 27
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.14-2.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
+   $1 = nterm exp (1.1-9: 7)
    $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
 Entering state 8
 Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
 Entering state 24
 Stack now 0 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Stack now 0 7
@@ -153779,2149 +153733,452 @@
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1-2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
+Next token is token number (2.1: 1)
+Shifting token number (2.1: 1)
+Entering state 1
+Stack now 0 6 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
 Entering state 8
-Stack now 0 8
+Stack now 0 6 8
 Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
 Entering state 20
-Stack now 0 8 20
+Stack now 0 6 8 20
 Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Stack now 0 8 20 4
+Next token is token number (2.5: 2)
+Shifting token number (2.5: 2)
+Entering state 1
+Stack now 0 6 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
+Entering state 29
+Stack now 0 6 8 20 29
 Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.1-8: )
-Entering state 11
-Stack now 0 8 20 4 11
-Next token is token invalid token (1.8: )
-Error: discarding token invalid token (1.8: )
-Error: popping token error (1.1-8: )
-Stack now 0 8 20 4
-Shifting token error (1.1-8: )
-Entering state 11
-Stack now 0 8 20 4 11
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 21
+Stack now 0 6 8 20 29 21
 Reading a token
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Stack now 0 8 20 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.1-8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Stack now 0 8 20 29
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Stack now 0 6 8 20 29 21 2
 Reading a token
-Next token is token '=' (1.11: )
+Next token is token number (2.10: 3)
+Shifting token number (2.10: 3)
+Entering state 1
+Stack now 0 6 8 20 29 21 2 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Stack now 0 6 8 20 29 21 2 10
+Reading a token
+Next token is token '=' (2.12: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 30
+Stack now 0 6 8 20 29 21 30
+Next token is token '=' (2.12: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
+Entering state 29
+Stack now 0 6 8 20 29
+Next token is token '=' (2.12: )
 Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
 Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
+Stack now 0 6 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
 Entering state 18
-Stack now 0 8 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Stack now 0 6 8 18 2
+Reading a token
+Next token is token number (2.15: 5)
+Shifting token number (2.15: 5)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 18 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
-Stack now 0 8 18 27
+   $1 = token number (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Stack now 0 6 8 18 2 10
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (2.16-3.0: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 27
+Stack now 0 6 8 18 27
+Next token is token '\n' (2.16-3.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
 Entering state 24
-Stack now 0 8 24
+Stack now 0 6 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1443: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1443:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 74):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Stack now 0 6 2
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token number (4.2: 1)
+Shifting token number (4.2: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 6 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
+   $1 = token number (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Stack now 0 6 2 10
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 23
+Stack now 0 6 2 10 23
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (4.4: 2)
+Shifting token number (4.4: 2)
+Entering state 1
+Stack now 0 6 2 10 23 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 32
+Stack now 0 6 2 10 23 32
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+Next token is token '=' (4.6: )
+Reducing stack by rule 12 (line 103):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (4.6: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
 Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Stack now 0 6 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
 Entering state 18
-Stack now 0 8 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Stack now 0 6 8 18 2
+Reading a token
+Next token is token number (4.9: 1)
+Shifting token number (4.9: 1)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 18 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Stack now 0 8 18 27
+   $1 = token number (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Stack now 0 6 8 18 2 10
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (4.10-5.0: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 27
+Stack now 0 6 8 18 27
+Next token is token '\n' (4.10-5.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
 Entering state 24
-Stack now 0 8 24
+Stack now 0 6 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
 Entering state 4
-Stack now 0 4
+Stack now 0 6 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Stack now 0 6 4 2
+Reading a token
+Next token is token number (5.3: 1)
+Shifting token number (5.3: 1)
 Entering state 1
-Stack now 0 4 1
+Stack now 0 6 4 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token number (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
+Stack now 0 6 4 2 10
+Reading a token
+Next token is token ')' (5.4: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
 Entering state 12
-Stack now 0 4 12
+Stack now 0 6 4 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 26
+Stack now 0 6 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
+Entering state 8
+Stack now 0 6 8
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 23
+Stack now 0 6 8 23
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (5.6: 2)
+Shifting token number (5.6: 2)
+Entering state 1
+Stack now 0 6 8 23 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 32
+Stack now 0 6 8 23 32
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+Next token is token '=' (5.8: )
+Reducing stack by rule 12 (line 103):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
 Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Stack now 0 6 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
 Entering state 18
-Stack now 0 8 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (5.10: 1)
+Shifting token number (5.10: 1)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 18 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
 Entering state 27
-Stack now 0 8 18 27
+Stack now 0 6 8 18 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (5.11-6.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
 Entering state 24
-Stack now 0 8 24
+Stack now 0 6 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1443: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1443:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Stack now 0 6 3
+Reducing stack by rule 3 (line 74):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
+Stack now 0 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
+Stack now 0 6 2
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Stack now 0 6 2 2
 Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-4: )
-Entering state 11
-Stack now 0 4 11
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Stack now 0 6 2 2 2
 Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.1-4: )
-Stack now 0 4
-Shifting token error (1.1-6: )
-Entering state 11
-Stack now 0 4 11
+Next token is token number (7.4: 1)
+Shifting token number (7.4: 1)
+Entering state 1
+Stack now 0 6 2 2 2 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Stack now 0 6 2 2 2 10
 Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Stack now 0 6 2 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Stack now 0 6 2 10
+Next token is token '=' (7.6: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
 Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Stack now 0 6 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
 Entering state 18
-Stack now 0 8 18
+Stack now 0 6 8 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
+Stack now 0 6 8 18 2
+Reading a token
+Next token is token number (7.9: 1)
+Shifting token number (7.9: 1)
 Entering state 1
-Stack now 0 8 18 1
+Stack now 0 6 8 18 2 1
 Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Stack now 0 8 18 27
+   $1 = token number (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Stack now 0 6 8 18 2 10
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (7.10-8.0: )
+Reducing stack by rule 11 (line 102):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 27
+Stack now 0 6 8 18 27
+Next token is token '\n' (7.10-8.0: )
 Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
 Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Stack now 0 6 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
 Entering state 24
-Stack now 0 8 24
+Stack now 0 6 8 24
 Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.2: )
-Error: discarding token invalid token (1.2: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-2: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Error: popping token error (1.1-2: )
-Stack now 0 4
-Shifting token error (1.1-4: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Error: popping token error (1.1-4: )
-Stack now 0 4
-Shifting token error (1.1-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.1-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1443: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1443:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Stack now 0 4 12
-Error: popping nterm exp (1.2: 1)
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Next token is token invalid token (1.6: )
-Error: discarding token invalid token (1.6: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-6: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Error: popping token error (1.2-6: )
-Stack now 0 4
-Shifting token error (1.2-8: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Error: popping token error (1.2-8: )
-Stack now 0 4
-Shifting token error (1.2-10: )
-Entering state 11
-Stack now 0 4 11
-Reading a token
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Stack now 0 4 11 25
-Reducing stack by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1443: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1443:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Stack now 0 4 12 20 29
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 22 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 22 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 22 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Stack now 0 8 22 4 12 19
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 22 4 12 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Stack now 0 8 22 4 12 19 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 22 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 22 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Stack now 0 8 22 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Stack now 0 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Stack now 0 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Stack now 0 4 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Stack now 0 4 12 20
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Stack now 0 4 12 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Stack now 0 4 12 20 29
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Stack now 0 4 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Stack now 0 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Stack now 0 8 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Stack now 0 8 22 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Stack now 0 8 22 4 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Stack now 0 8 22 4 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Stack now 0 8 22 4 12 19
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Stack now 0 8 22 4 12 19 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Stack now 0 8 22 4 12 19 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Stack now 0 8 22 4 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Stack now 0 8 22 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Stack now 0 8 22 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token end of input (2.1: )
-Shifting token end of input (2.1: )
-Entering state 16
-Stack now 0 6 16
-Stack now 0 6 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1443: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1443: cat stderr
-532. calc.at:1443:  ok
-
-541. calc.at:1457: testing Calculator C++ parse.error=custom  ...
-./calc.at:1457: mv calc.y.tmp calc.y
-
-./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1457: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-stderr:
-stdout:
-./calc.at:1453: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-stderr:
-  | 1 2
-stdout:
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1448: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
-1.3: syntax error, unexpected number
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error, unexpected number
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1448:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1453: cat stderr
-./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-input:
-  | 1 2
-./calc.at:1448:  $PREPARSER ./calc  input
-  | 1//2
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error
-stderr:
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.3: syntax error
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1453: cat stderr
-input:
-  | error
-./calc.at:1453:  $PREPARSER ./calc  input
-./calc.at:1448: cat stderr
-stderr:
-1.1: syntax error, unexpected invalid token
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1//2
-./calc.at:1448:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.3: syntax error
-1.1: syntax error, unexpected invalid token
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.3: syntax error
-stderr:
-stdout:
-./calc.at:1453: cat stderr
-./calc.at:1454: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1454:  $PREPARSER ./calc  input
-input:
-  | 1 = 2 = 3
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.7: syntax error, unexpected '='
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1448: cat stderr
-stderr:
-stderr:
-./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.7: syntax error, unexpected '='
-input:
-  | error
-./calc.at:1448:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 1 2
-1.1: syntax error
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1454:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./calc.at:1453: cat stderr
-1.3: syntax error, unexpected number
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.1: syntax error
-stderr:
-input:
-1.3: syntax error, unexpected number
-  | 
-  | +1
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-2.1: syntax error, unexpected '+'
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-2.1: syntax error, unexpected '+'
-./calc.at:1454: cat stderr
-./calc.at:1448: cat stderr
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 1//2
-input:
-./calc.at:1454:  $PREPARSER ./calc  input
-  | 1 = 2 = 3
-./calc.at:1448:  $PREPARSER ./calc  input
-./calc.at:1453: cat stderr
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1453:  $PREPARSER ./calc  /dev/null
-1.7: syntax error
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.1: syntax error, unexpected end of input
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.7: syntax error
-1.1: syntax error, unexpected end of input
-./calc.at:1454: cat stderr
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | error
-./calc.at:1454:  $PREPARSER ./calc  input
-./calc.at:1453: cat stderr
-stderr:
-input:
-./calc.at:1448: cat stderr
-1.1: syntax error, unexpected invalid token
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1453:  $PREPARSER ./calc  input
-input:
-1.1: syntax error, unexpected invalid token
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-  | 
-  | +1
-./calc.at:1448:  $PREPARSER ./calc  input
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-stderr:
-2.1: syntax error
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1454: cat stderr
-stderr:
-2.1: syntax error
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1453: cat stderr
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 1 = 2 = 3
-./calc.at:1454:  $PREPARSER ./calc  input
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-1.7: syntax error, unexpected '='
-stderr:
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1448: cat stderr
-stderr:
-stderr:
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-stdout:
-1.7: syntax error, unexpected '='
-./calc.at:1446: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
-./calc.at:1448:  $PREPARSER ./calc  /dev/null
-stderr:
-1.1: syntax error
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1446:  $PREPARSER ./calc  input
-./calc.at:1454: cat stderr
-1.1: syntax error
-./calc.at:1453: cat stderr
-input:
-stderr:
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 
-  | +1
-./calc.at:1454:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Stack now 0 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Stack now 0 8 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Stack now 0 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Stack now 0 8 20 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Stack now 0 8 20 29 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Stack now 0 8 20 29 21 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Stack now 0 8 20 29 21 30
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Stack now 0 8 20 29
-Next token is token '=' (1.11: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Stack now 0 8 18
-Reading a token
-Next token is token number (1.13: 7)
-Shifting token number (1.13: 7)
-Entering state 1
-Stack now 0 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 27
-Stack now 0 8 18 27
-Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
-Entering state 8
-Stack now 0 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
-Entering state 24
-Stack now 0 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Stack now 0 7
-Reducing stack by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token number (2.1: 1)
-Shifting token number (2.1: 1)
-Entering state 1
-Stack now 0 6 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 20
-Stack now 0 6 8 20
-Reading a token
-Next token is token number (2.5: 2)
-Shifting token number (2.5: 2)
-Entering state 1
-Stack now 0 6 8 20 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 29
-Stack now 0 6 8 20 29
-Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 21
-Stack now 0 6 8 20 29 21
-Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
-Stack now 0 6 8 20 29 21 2
-Reading a token
-Next token is token number (2.10: 3)
-Shifting token number (2.10: 3)
-Entering state 1
-Stack now 0 6 8 20 29 21 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
-Stack now 0 6 8 20 29 21 2 10
-Reading a token
-Next token is token '=' (2.12: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 30
-Stack now 0 6 8 20 29 21 30
-Next token is token '=' (2.12: )
-Reducing stack by rule 9 (line 92):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
-Entering state 29
-Stack now 0 6 8 20 29
-Next token is token '=' (2.12: )
-Reducing stack by rule 7 (line 90):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
-Stack now 0 6 8 18 2
-Reading a token
-Next token is token number (2.15: 5)
-Shifting token number (2.15: 5)
-Entering state 1
-Stack now 0 6 8 18 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
-Stack now 0 6 8 18 2 10
-Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 27
-Stack now 0 6 8 18 27
-Next token is token '\n' (2.16-3.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 74):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token number (4.2: 1)
-Shifting token number (4.2: 1)
-Entering state 1
-Stack now 0 6 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
-Stack now 0 6 2 10
-Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 23
-Stack now 0 6 2 10 23
-Reading a token
-Next token is token number (4.4: 2)
-Shifting token number (4.4: 2)
-Entering state 1
-Stack now 0 6 2 10 23 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 32
-Stack now 0 6 2 10 23 32
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack by rule 12 (line 103):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (4.6: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
-Stack now 0 6 8 18 2
-Reading a token
-Next token is token number (4.9: 1)
-Shifting token number (4.9: 1)
-Entering state 1
-Stack now 0 6 8 18 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
-Stack now 0 6 8 18 2 10
-Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 27
-Stack now 0 6 8 18 27
-Next token is token '\n' (4.10-5.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
-Entering state 4
-Stack now 0 6 4
-Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
-Stack now 0 6 4 2
-Reading a token
-Next token is token number (5.3: 1)
-Shifting token number (5.3: 1)
-Entering state 1
-Stack now 0 6 4 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Stack now 0 6 4 2 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Stack now 0 6 4 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 26
-Stack now 0 6 4 12 26
-Reducing stack by rule 13 (line 104):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 23
-Stack now 0 6 8 23
-Reading a token
-Next token is token number (5.6: 2)
-Shifting token number (5.6: 2)
-Entering state 1
-Stack now 0 6 8 23 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 32
-Stack now 0 6 8 23 32
-Reading a token
-Next token is token '=' (5.8: )
-Reducing stack by rule 12 (line 103):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token number (5.10: 1)
-Shifting token number (5.10: 1)
-Entering state 1
-Stack now 0 6 8 18 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 27
-Stack now 0 6 8 18 27
-Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Stack now 0 6 3
-Reducing stack by rule 3 (line 74):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
-Stack now 0 6
-Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
-Stack now 0 6 2
-Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
-Stack now 0 6 2 2
-Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
-Stack now 0 6 2 2 2
-Reading a token
-Next token is token number (7.4: 1)
-Shifting token number (7.4: 1)
-Entering state 1
-Stack now 0 6 2 2 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Stack now 0 6 2 2 2 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Stack now 0 6 2 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Stack now 0 6 2 10
-Next token is token '=' (7.6: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 18
-Stack now 0 6 8 18
-Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
-Stack now 0 6 8 18 2
-Reading a token
-Next token is token number (7.9: 1)
-Shifting token number (7.9: 1)
-Entering state 1
-Stack now 0 6 8 18 2 1
-Reducing stack by rule 5 (line 79):
-   $1 = token number (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
-Stack now 0 6 8 18 2 10
-Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 11 (line 102):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 27
-Stack now 0 6 8 18 27
-Next token is token '\n' (7.10-8.0: )
-Reducing stack by rule 6 (line 80):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
-Entering state 8
-Stack now 0 6 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 24
-Stack now 0 6 8 24
-Reducing stack by rule 4 (line 75):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 17
-Stack now 0 6 17
-Reducing stack by rule 2 (line 70):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 17
+Stack now 0 6 17
+Reducing stack by rule 2 (line 70):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
 Entering state 6
 Stack now 0 6
 Reading a token
@@ -156394,15 +154651,97 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-input:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Stack now 0 8 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Stack now 0 8 20 29 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Stack now 0 8 20 29 21 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Stack now 0 8 20 29 21 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Stack now 0 8 20 29
+Next token is token '+' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Stack now 0 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Stack now 0 8 20 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Stack now 0 8 20 5 13
+Reducing stack by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Stack now 0 8 20
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-  | (- *) + (1 2) = 1
-stderr:
-./calc.at:1453:  $PREPARSER ./calc  input
-./calc.at:1448: cat stderr
-2.1: syntax error, unexpected '+'
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1443: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -157422,46 +155761,13 @@
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-input:
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1448:  $PREPARSER ./calc  input
-stderr:
-stderr:
-2.1: syntax error, unexpected '+'
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1443: cat stderr
 input:
   | 1 2
 ./calc.at:1446:  $PREPARSER ./calc  input
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-stderr:
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+input:
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
+  | (#) + (#) = 2222
 Starting parse
 Entering state 0
 Stack now 0
@@ -157482,28 +155788,133 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1443:  $PREPARSER ./calc  input
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1454: cat stderr
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1-2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.1-8: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.1-8: )
+Stack now 0 8 20 4
+Shifting token error (1.1-8: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.1-8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -157525,9 +155936,130 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
-./calc.at:1454:  $PREPARSER ./calc  /dev/null
-./calc.at:1453: cat stderr
 stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1-2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Stack now 0 8 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Stack now 0 8 20 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.1-8: )
+Entering state 11
+Stack now 0 8 20 4 11
+Next token is token invalid token (1.8: )
+Error: discarding token invalid token (1.8: )
+Error: popping token error (1.1-8: )
+Stack now 0 8 20 4
+Shifting token error (1.1-8: )
+Entering state 11
+Stack now 0 8 20 4 11
+Reading a token
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Stack now 0 8 20 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.1-8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Stack now 0 8 20 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -157538,24 +156070,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1448: cat stderr
-1.1: syntax error, unexpected end of file
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-input:
-  | (!!) + (1 2) = 1
-stderr:
-./calc.at:1448:  $PREPARSER ./calc  input
-  | (* *) + (*) + (*)
-./calc.at:1453:  $PREPARSER ./calc  input
-1.1: syntax error, unexpected end of file
-./calc.at:1446: cat stderr
-stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1454: "$PERL" -pi -e 'use strict;
+./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -157565,18 +156080,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1446: cat stderr
 input:
-stderr:
+./calc.at:1443: cat stderr
   | 1//2
 ./calc.at:1446:  $PREPARSER ./calc  input
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
 Starting parse
 Entering state 0
 Stack now 0
@@ -157604,22 +156113,111 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
-./calc.at:1454: cat stderr
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+input:
+  | (1 + #) = 1111
+./calc.at:1443:  $PREPARSER ./calc  input
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 Starting parse
 Entering state 0
 Stack now 0
@@ -157647,22 +156245,8 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
-input:
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1454:  $PREPARSER ./calc  input
-./calc.at:1453: cat stderr
+./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1448: cat stderr
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -157673,38 +156257,107 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1448:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1446: cat stderr
-stderr:
-input:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-stderr:
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.1-46: error: 4444 != 1
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-./calc.at:1448: "$PERL" -pi -e 'use strict;
+./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -157714,22 +156367,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
   | error
 ./calc.at:1446:  $PREPARSER ./calc  input
+./calc.at:1443: cat stderr
 stderr:
-stderr:
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1448: cat stderr
-./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -157738,18 +156380,11 @@
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
-input:
-./calc.at:1454: cat stderr
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (* *) + (*) + (*)
-./calc.at:1448:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
 input:
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+  | (# + 1) = 1111
+./calc.at:1443:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -157759,22 +156394,102 @@
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
 stderr:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1453:  $PREPARSER ./calc  input
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-input:
-stderr:
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (!!) + (1 2) = 1
-./calc.at:1454:  $PREPARSER ./calc  input
-stderr:
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.1-4: )
+Stack now 0 4
+Shifting token error (1.1-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -157785,10 +156500,110 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.11: syntax error, unexpected number
-1.1-16: error: 2222 != 1
+./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1446: cat stderr
-./calc.at:1448: "$PERL" -pi -e 'use strict;
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.2: )
+Error: discarding token invalid token (1.2: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-2: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Error: popping token error (1.1-2: )
+Stack now 0 4
+Shifting token error (1.1-4: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Error: popping token error (1.1-4: )
+Stack now 0 4
+Shifting token error (1.1-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.1-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+  | 1 = 2 = 3
+./calc.at:1446:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -157798,10 +156613,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 = 2 = 3
-./calc.at:1446:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -157841,35 +156652,9 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1448: cat stderr
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1443: cat stderr
 stderr:
-./calc.at:1453: cat stderr
-./calc.at:1454: cat stderr
-input:
-input:
-  | (#) + (#) = 2222
-./calc.at:1453:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -157909,21 +156694,9 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1448:  $PREPARSER ./calc  input
 input:
-stderr:
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (- *) + (1 2) = 1
-./calc.at:1454:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-stderr:
+  | (1 + # + 1) = 1111
+./calc.at:1443:  $PREPARSER ./calc  input
 stderr:
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -157935,34 +156708,244 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1446: cat stderr
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.12: syntax error, unexpected number
-1.1-17: error: 2222 != 1
-input:
+./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Stack now 0 4 12
+Error: popping nterm exp (1.2: 1)
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Next token is token invalid token (1.6: )
+Error: discarding token invalid token (1.6: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-6: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Error: popping token error (1.2-6: )
+Stack now 0 4
+Shifting token error (1.2-8: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Error: popping token error (1.2-8: )
+Stack now 0 4
+Shifting token error (1.2-10: )
+Entering state 11
+Stack now 0 4 11
+Reading a token
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Stack now 0 4 11 25
+Reducing stack by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Stack now 0 8 18
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Stack now 0 8 18 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Stack now 0 8 18 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 input:
-./calc.at:1453: cat stderr
   | 
   | +1
 ./calc.at:1446:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !- ++
-./calc.at:1454: "$PERL" -pi -e 'use strict;
+./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -157972,9 +156955,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1448:  $PREPARSER ./calc  input
-stderr:
 stderr:
+./calc.at:1443: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -158000,22 +156982,12 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
-./calc.at:1454: cat stderr
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-stderr:
 input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1443:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (* *) + (*) + (*)
 stderr:
-./calc.at:1454:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -158041,22 +157013,290 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Stack now 0 4 12 20 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 22 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 22 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 22 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Stack now 0 8 22 4 12 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 22 4 12 19 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 22 4 12 19 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 22 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 22 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Stack now 0 8 22 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Stack now 0 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Stack now 0 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Stack now 0 4 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Stack now 0 4 12 20
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Stack now 0 4 12 20 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Stack now 0 4 12 20 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Stack now 0 4 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Stack now 0 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Stack now 0 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Stack now 0 8 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Stack now 0 8 22 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Stack now 0 8 22 4 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Stack now 0 8 22 4 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Stack now 0 8 22 4 12 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Stack now 0 8 22 4 12 19 1
+Reducing stack by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Stack now 0 8 22 4 12 19 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Stack now 0 8 22 4 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Stack now 0 8 22 4 12 26
+Reducing stack by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Stack now 0 8 22 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Stack now 0 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Stack now 0 8 24
+Reducing stack by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Stack now 0 7
+Reducing stack by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Stack now 0 6
+Reading a token
+Next token is token end of input (2.1: )
+Shifting token end of input (2.1: )
+Entering state 16
+Stack now 0 6 16
+Stack now 0 6 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -158067,7 +157307,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1453: "$PERL" -pi -e 'use strict;
+./calc.at:1443: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -158077,22 +157317,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 ./calc.at:1446: cat stderr
+./calc.at:1443: cat stderr
 ./calc.at:1446:  $PREPARSER ./calc  /dev/null
+532. calc.at:1443:  ok
 stderr:
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -158101,16 +157330,9 @@
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
 Stack now 0
-./calc.at:1453: cat stderr
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1454: cat stderr
-input:
-  | (# + 1) = 1111
-stderr:
-./calc.at:1453:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1448: cat stderr
-1.2: syntax error: invalid character: '#'
+
 Starting parse
 Entering state 0
 Stack now 0
@@ -158119,12 +157341,6 @@
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
 Stack now 0
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-stderr:
-./calc.at:1454:  $PREPARSER ./calc  input
-1.2: syntax error: invalid character: '#'
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -158135,36 +157351,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-input:
-  | (#) + (#) = 2222
-./calc.at:1448:  $PREPARSER ./calc  input
 ./calc.at:1446: cat stderr
-stderr:
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
 input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1446:  $PREPARSER ./calc  input
-./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1453: cat stderr
 stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -158482,23 +157673,12 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+541. calc.at:1457: testing Calculator C++ parse.error=custom  ...
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1454:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1457: mv calc.y.tmp calc.y
+
+./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 stderr:
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -158816,13 +157996,6 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + # + 1) = 1111
-./calc.at:1453:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -158833,48 +158006,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1448: cat stderr
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1446: cat stderr
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1453: cat stderr
-./calc.at:1454: cat stderr
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1446:  $PREPARSER ./calc  input
-input:
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1453:  $PREPARSER ./calc  input
-  | (1 + #) = 1111
-./calc.at:1448:  $PREPARSER ./calc  input
-input:
-stderr:
-stderr:
-  | (#) + (#) = 2222
 stderr:
-./calc.at:1454:  $PREPARSER ./calc  input
-1.11-17: error: null divisor
 Starting parse
 Entering state 0
 Stack now 0
@@ -159015,18 +158151,8 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.6: syntax error: invalid character: '#'
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -159167,30 +158293,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-1.11-17: error: null divisor
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1453: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1457: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -159201,63 +158304,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1453: cat stderr
-./calc.at:1454: cat stderr
-./calc.at:1448: cat stderr
-538. calc.at:1453:  ok
 ./calc.at:1446: cat stderr
 input:
-  | (# + 1) = 1111
-./calc.at:1448:  $PREPARSER ./calc  input
-input:
-  | (1 + #) = 1111
-./calc.at:1454:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-input:
-1.6: syntax error: invalid character: '#'
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
   | (- *) + (1 2) = 1
 ./calc.at:1446:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1454: cat stderr
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -159406,13 +158457,8 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | (# + 1) = 1111
-./calc.at:1448: cat stderr
-./calc.at:1454:  $PREPARSER ./calc  input
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -159561,11 +158607,6 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-1.2: syntax error: invalid character: '#'
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1448:  $PREPARSER ./calc  input
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -159576,45 +158617,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.2: syntax error: invalid character: '#'
-stderr:
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.6: syntax error: invalid character: '#'
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1454: cat stderr
-stderr:
 ./calc.at:1446: cat stderr
-1.6: syntax error: invalid character: '#'
-input:
-  | (1 + # + 1) = 1111
 input:
-./calc.at:1454:  $PREPARSER ./calc  input
-stderr:
   | (* *) + (*) + (*)
 ./calc.at:1446:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -159767,9 +158773,21 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-1.6: syntax error: invalid character: '#'
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stdout:
+stderr:
+./calc.at:1453: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 Starting parse
 Entering state 0
 Stack now 0
@@ -159921,21 +158939,21 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-542. calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1458: mv calc.y.tmp calc.y
-
-./calc.at:1448: cat stderr
-./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1453:  $PREPARSER ./calc  input
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -159946,28 +158964,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1454: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1446: cat stderr
-./calc.at:1448:  $PREPARSER ./calc  input
-input:
-  | (1 + 1) / (1 - 1)
 stderr:
-1.11-17: error: null divisor
-./calc.at:1454:  $PREPARSER ./calc  input
-./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1446: cat stderr
 stderr:
+./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
-1.11-17: error: null divisor
-./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1446:  $PREPARSER ./calc  input
+input:
+  | 1 2
 stderr:
-1.11-17: error: null divisor
-stderr:
-stderr:
-1.11-17: error: null divisor
+./calc.at:1453:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -160048,29 +159056,11 @@
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1448: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+1.3: syntax error, unexpected number
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1454: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1454: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -160151,14 +159141,25 @@
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
 ./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1448: cat stderr
-539. calc.at:1454:  ok
+1.3: syntax error, unexpected number
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | 1 + 2 * 3 + !- ++
 ./calc.at:1446:  $PREPARSER ./calc  input
-535. calc.at:1448:  ok
+./calc.at:1453: cat stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -160239,9 +159240,15 @@
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-
+  | 1//2
+./calc.at:1453:  $PREPARSER ./calc  input
+stderr:
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -160322,7 +159329,16 @@
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -160333,12 +159349,32 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1453: cat stderr
 ./calc.at:1446: cat stderr
 input:
+  | error
+./calc.at:1453:  $PREPARSER ./calc  input
+input:
+stderr:
   | (#) + (#) = 2222
+1.1: syntax error, unexpected invalid token
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1446:  $PREPARSER ./calc  input
-./calc.at:1458: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
+stderr:
+stderr:
+stdout:
+./calc.at:1454: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 Starting parse
 Entering state 0
 Stack now 0
@@ -160462,11 +159498,37 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+1.1: syntax error, unexpected invalid token
+stderr:
+stdout:
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full  ...
-./calc.at:1459: mv calc.y.tmp calc.y
+./calc.at:1448: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
 
-./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
+stderr:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1448:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -160591,6 +159653,38 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1454:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1453: cat stderr
+./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -160601,11 +159695,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
+input:
+  | 1 2
+./calc.at:1448:  $PREPARSER ./calc  input
 ./calc.at:1446: cat stderr
+  | 1 = 2 = 3
+./calc.at:1453:  $PREPARSER ./calc  input
+input:
+stderr:
+stderr:
+1.3: syntax error
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1454:  $PREPARSER ./calc  input
+1.7: syntax error, unexpected '='
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 input:
+1.3: syntax error
+1.3: syntax error, unexpected number
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
   | (1 + #) = 1111
 ./calc.at:1446:  $PREPARSER ./calc  input
 stderr:
+1.7: syntax error, unexpected '='
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -160705,9 +159823,30 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+1.3: syntax error, unexpected number
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-544. calc.at:1468: testing Calculator glr.cc  ...
+./calc.at:1448: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -160807,6 +159946,18 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1453: cat stderr
+./calc.at:1454: cat stderr
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -160817,14 +159968,45 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1468: mv calc.y.tmp calc.y
-
+input:
+  | 1//2
+./calc.at:1448:  $PREPARSER ./calc  input
+stderr:
+input:
+input:
+1.3: syntax error
 ./calc.at:1446: cat stderr
-./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
+  | 
+  | +1
+./calc.at:1454:  $PREPARSER ./calc  input
+./calc.at:1453:  $PREPARSER ./calc  input
+stderr:
+1.3: syntax error
+stderr:
+stderr:
+2.1: syntax error, unexpected '+'
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | (# + 1) = 1111
 ./calc.at:1446:  $PREPARSER ./calc  input
 stderr:
+stderr:
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 Starting parse
 Entering state 0
 Stack now 0
@@ -160921,7 +160103,19 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+2.1: syntax error, unexpected '+'
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1448: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -161019,6 +160213,26 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1454: cat stderr
+input:
+  | error
+./calc.at:1448:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | error
+./calc.at:1454:  $PREPARSER ./calc  input
+1.1: syntax error
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -161029,12 +160243,34 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+1.1: syntax error, unexpected invalid token
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1453: cat stderr
+stderr:
 ./calc.at:1446: cat stderr
+stderr:
+1.1: syntax error, unexpected invalid token
+1.1: syntax error
+./calc.at:1453:  $PREPARSER ./calc  /dev/null
+stderr:
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.1: syntax error, unexpected end of input
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-./calc.at:1459: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
   | (1 + # + 1) = 1111
 ./calc.at:1446:  $PREPARSER ./calc  input
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -161150,9 +160386,22 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1468: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1454: cat stderr
+1.1: syntax error, unexpected end of input
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1448: cat stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -161268,6 +160517,21 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+input:
+  | 1 = 2 = 3
+./calc.at:1448:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
+./calc.at:1454:  $PREPARSER ./calc  input
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1446: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -161278,10 +160542,52 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+stderr:
 ./calc.at:1446: cat stderr
+1.7: syntax error
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.7: syntax error, unexpected '='
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1453: cat stderr
+stderr:
+1.7: syntax error, unexpected '='
+input:
+1.7: syntax error
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 input:
+./calc.at:1453:  $PREPARSER ./calc  input
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | (1 + 1) / (1 - 1)
 ./calc.at:1446:  $PREPARSER ./calc  input
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -161424,7 +160730,14 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1448: cat stderr
 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1454: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -161567,7 +160880,8 @@
 Stack now 0 6 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1446: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1453: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -161577,55 +160891,69 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1446: cat stderr
-534. calc.at:1446:  ok
-
-545. calc.at:1469: testing Calculator glr2.cc  ...
-./calc.at:1469: mv calc.y.tmp calc.y
-
-./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+  | 
+  | +1
+./calc.at:1448:  $PREPARSER ./calc  input
 stderr:
-stdout:
-./calc.at:1449: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
+2.1: syntax error
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1453: cat stderr
+./calc.at:1446: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1449:  $PREPARSER ./calc  input
-./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+  | +1
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1454:  $PREPARSER ./calc  input
+./calc.at:1453:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 2
-./calc.at:1449:  $PREPARSER ./calc  input
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error
+./calc.at:1446: cat stderr
 stderr:
-1.3: syntax error
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+534. calc.at:1446: 2.1: syntax error, unexpected '+'
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error
-./calc.at:1449: "$PERL" -pi -e 'use strict;
+ ok
+stderr:
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+2.1: syntax error, unexpected '+'
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1448: cat stderr
+
+./calc.at:1454: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -161636,8 +160964,27 @@
   }eg
 ' expout || exit 77
 stderr:
-./calc.at:1449: cat stderr
+./calc.at:1448:  $PREPARSER ./calc  /dev/null
+./calc.at:1453: cat stderr
+stdout:
+./calc.at:1454: cat stderr
+stderr:
+./calc.at:1449: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
+stderr:
+./calc.at:1454:  $PREPARSER ./calc  /dev/null
 stdout:
+1.1: syntax error
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1451: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -161650,14 +160997,13 @@
         )' calc.cc calc.hh
 
 input:
-  | 1//2
-./calc.at:1449:  $PREPARSER ./calc  input
 stderr:
-input:
-1.3: syntax error
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (- *) + (1 2) = 1
 stderr:
-1.3: syntax error
+./calc.at:1453:  $PREPARSER ./calc  input
+input:
+input:
+1.1: syntax error, unexpected end of file
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -161671,12 +161017,53 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1451:  $PREPARSER ./calc  input
+./calc.at:1449:  $PREPARSER ./calc  input
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error
 stderr:
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
+stderr:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1451:  $PREPARSER ./calc  input
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+stderr:
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error, unexpected end of file
+stderr:
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
     my $unexp = $1;
     my @exps = $2 =~ /\[(.*?)\]/g;
     ($#exps && $#exps < 4)
@@ -161685,24 +161072,347 @@
   }eg
 ' expout || exit 77
 stderr:
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1454: cat stderr
+  | 1 2
+./calc.at:1449:  $PREPARSER ./calc  input
+./calc.at:1448: cat stderr
 ./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1454:  $PREPARSER ./calc  input
+input:
+1.3: syntax error
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1448:  $PREPARSER ./calc  input
+stderr:
+542. calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1458: mv calc.y.tmp calc.y
+
+stderr:
+stderr:
+./calc.at:1453: cat stderr
+input:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+  | 1 2
+./calc.at:1451:  $PREPARSER ./calc  input
+stderr:
+stderr:
+input:
+stderr:
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.1-46: error: 4444 != 1
+  | (* *) + (*) + (*)
+./calc.at:1453:  $PREPARSER ./calc  input
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.3: syntax error
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1454: cat stderr
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1449: cat stderr
+stderr:
+input:
+1.3: syntax error
+  | (!!) + (1 2) = 1
+./calc.at:1454:  $PREPARSER ./calc  input
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+input:
+stderr:
+  | 1//2
+./calc.at:1449:  $PREPARSER ./calc  input
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1448: cat stderr
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.3: syntax error
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1453: cat stderr
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+input:
+1.3: syntax error
+stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1448:  $PREPARSER ./calc  input
+1.11: syntax error, unexpected number
+1.1-16: error: 2222 != 1
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1453:  $PREPARSER ./calc  input
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1449: cat stderr
+./calc.at:1451: cat stderr
+./calc.at:1454: cat stderr
+stderr:
+1.11: syntax error
+1.1-16: error: 2222 != 1
+stderr:
 input:
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | error
 ./calc.at:1449:  $PREPARSER ./calc  input
+./calc.at:1458: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 input:
-  | 1 2
-./calc.at:1451:  $PREPARSER ./calc  input
+input:
+stderr:
+  | (- *) + (1 2) = 1
+./calc.at:1454:  $PREPARSER ./calc  input
 stderr:
 1.1: syntax error
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1//2
+./calc.at:1451:  $PREPARSER ./calc  input
+./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+1.1: syntax error
 1.3: syntax error
 ./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.12: syntax error, unexpected number
+1.1-17: error: 2222 != 1
+stderr:
+1.3: syntax error
+input:
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 + 2 * 3 + !- ++
+./calc.at:1448: cat stderr
+./calc.at:1453:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1454: cat stderr
+  | (- *) + (1 2) = 1
+./calc.at:1448:  $PREPARSER ./calc  input
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1449: cat stderr
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+input:
+  | 1 = 2 = 3
+./calc.at:1449:  $PREPARSER ./calc  input
+stderr:
+  | (* *) + (*) + (*)
+./calc.at:1454:  $PREPARSER ./calc  input
+./calc.at:1451: cat stderr
+stderr:
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+1.7: syntax error
 ./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+  | error
+./calc.at:1451:  $PREPARSER ./calc  input
+1.7: syntax error
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1453: cat stderr
 stderr:
+1.1: syntax error
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1448: cat stderr
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 1.1: syntax error
-1.3: syntax error
 ./calc.at:1449: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -161713,6 +161423,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+input:
+  | (* *) + (*) + (*)
+  | (#) + (#) = 2222
+./calc.at:1453:  $PREPARSER ./calc  input
+./calc.at:1448:  $PREPARSER ./calc  input
+./calc.at:1454: cat stderr
+stderr:
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1451: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -161723,24 +161445,252 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1449: cat stderr
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+input:
 ./calc.at:1451: cat stderr
+stderr:
+  | 1 + 2 * 3 + !+ ++
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1454:  $PREPARSER ./calc  input
+stderr:
 input:
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
-  | 1 = 2 = 3
+  | 
+  | +1
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1449:  $PREPARSER ./calc  input
-  | 1//2
+  | 1 = 2 = 3
 ./calc.at:1451:  $PREPARSER ./calc  input
 stderr:
-1.7: syntax error
+./calc.at:1453: cat stderr
+2.1: syntax error
 ./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error
 stderr:
+input:
+1.7: syntax error
 ./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1448: cat stderr
+./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+  | (1 + #) = 1111
+./calc.at:1453:  $PREPARSER ./calc  input
+2.1: syntax error
+stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
 1.7: syntax error
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 + 2 * 3 + !- ++
 stderr:
-1.3: syntax error
+./calc.at:1454:  $PREPARSER ./calc  input
+stderr:
+1.6: syntax error: invalid character: '#'
+input:
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1448:  $PREPARSER ./calc  input
+./calc.at:1449: cat stderr
+stderr:
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1453: cat stderr
+stderr:
+./calc.at:1451: cat stderr
+./calc.at:1449:  $PREPARSER ./calc  /dev/null
+./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
+  | (# + 1) = 1111
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1453:  $PREPARSER ./calc  input
+stderr:
+input:
+1.1: syntax error
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1454: cat stderr
+1.2: syntax error: invalid character: '#'
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 
+  | +1
+./calc.at:1451:  $PREPARSER ./calc  input
+stderr:
+stderr:
+1.1: syntax error
+input:
+2.1: syntax error
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1448:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+stderr:
+./calc.at:1454:  $PREPARSER ./calc  input
+stderr:
+2.1: syntax error
+1.2: syntax error: invalid character: '#'
+stderr:
+stderr:
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1449: cat stderr
+./calc.at:1451: cat stderr
+./calc.at:1453: cat stderr
+./calc.at:1451:  $PREPARSER ./calc  /dev/null
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+input:
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1449:  $PREPARSER ./calc  input
+./calc.at:1448: cat stderr
+stderr:
+./calc.at:1454: cat stderr
+1.1: syntax error
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + # + 1) = 1111
+./calc.at:1453:  $PREPARSER ./calc  input
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+stderr:
+stderr:
+1.1: syntax error
+1.6: syntax error: invalid character: '#'
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1449: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -161751,7 +161701,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+input:
+  | (1 + #) = 1111
+./calc.at:1454:  $PREPARSER ./calc  input
+stderr:
+1.6: syntax error: invalid character: '#'
+  | (#) + (#) = 2222
+./calc.at:1448:  $PREPARSER ./calc  input
 stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1451: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -161762,8 +161722,482 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stdout:
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1449: cat stderr
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1453: cat stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1451: cat stderr
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (1 + 1) / (1 - 1)
+input:
+./calc.at:1453:  $PREPARSER ./calc  input
+./calc.at:1454: cat stderr
+  | (!!) + (1 2) = 1
+stderr:
+./calc.at:1449:  $PREPARSER ./calc  input
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+stderr:
+1.11-17: error: null divisor
+./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1451:  $PREPARSER ./calc  input
+stderr:
+stderr:
+input:
+1.11-17: error: null divisor
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1448: cat stderr
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+  | (# + 1) = 1111
+./calc.at:1454:  $PREPARSER ./calc  input
+1.11: syntax error
+1.1-16: error: 2222 != 1
+input:
+stderr:
+stderr:
+  | (1 + #) = 1111
+./calc.at:1448:  $PREPARSER ./calc  input
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1453: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1453: cat stderr
+1.2: syntax error: invalid character: '#'
+stderr:
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.6: syntax error: invalid character: '#'
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+538. calc.at:1453:  ok
+./calc.at:1449: cat stderr
+./calc.at:1454: cat stderr
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1454:  $PREPARSER ./calc  input
+input:
+./calc.at:1448: cat stderr
+./calc.at:1451: cat stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+  | (- *) + (1 2) = 1
+./calc.at:1449:  $PREPARSER ./calc  input
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+
+  | (# + 1) = 1111
+  | (!!) + (1 2) = 1
+./calc.at:1451:  $PREPARSER ./calc  input
+./calc.at:1448:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+stderr:
+stderr:
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.2: syntax error: invalid character: '#'
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1454: cat stderr
+stderr:
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+1.2: syntax error: invalid character: '#'
+input:
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (1 + 1) / (1 - 1)
+./calc.at:1454:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1449: cat stderr
+1.11-17: error: null divisor
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1451: cat stderr
+stderr:
+./calc.at:1448: cat stderr
+input:
+1.11-17: error: null divisor
+543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full  ...
+./calc.at:1459: mv calc.y.tmp calc.y
+
+  | (- *) + (1 2) = 1
+./calc.at:1451:  $PREPARSER ./calc  input
+input:
+stderr:
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1448:  $PREPARSER ./calc  input
+./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+  | (* *) + (*) + (*)
+./calc.at:1449:  $PREPARSER ./calc  input
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1454: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1454: cat stderr
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+539. calc.at:1454:  ok
+./calc.at:1451: cat stderr
+./calc.at:1449: cat stderr
+./calc.at:1448: cat stderr
+input:
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1449:  $PREPARSER ./calc  input
+input:
+  | (* *) + (*) + (*)
+./calc.at:1451:  $PREPARSER ./calc  input
+stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1448:  $PREPARSER ./calc  input
+
+stderr:
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1459: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+stderr:
+stderr:
+1.11-17: error: null divisor
+./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+stderr:
+1.11-17: error: null divisor
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1449:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1448: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1451: cat stderr
+input:
+./calc.at:1448: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1451:  $PREPARSER ./calc  input
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+535. calc.at:1448:  ok
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+544. calc.at:1468: testing Calculator glr.cc  ...
+./calc.at:1468: mv calc.y.tmp calc.y
+
+./calc.at:1449: cat stderr
+stderr:
+./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
+  | (#) + (#) = 2222
+./calc.at:1449:  $PREPARSER ./calc  input
+
+stderr:
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1451:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1449: cat stderr
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (1 + #) = 1111
+./calc.at:1449:  $PREPARSER ./calc  input
+./calc.at:1451: cat stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+545. calc.at:1469: testing Calculator glr2.cc  ...
+./calc.at:1469: mv calc.y.tmp calc.y
+
+./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
+  | (#) + (#) = 2222
+stderr:
+./calc.at:1451:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stdout:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1468: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 ./calc.at:1455: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -161775,12 +162209,17 @@
         || /\t/
         )' calc.cc
 
-./calc.at:1451: cat stderr
-input:
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
-  | 
-  | +1
-./calc.at:1449:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -161795,30 +162234,9 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1455:  $PREPARSER ./calc  input
+./calc.at:1449: cat stderr
+./calc.at:1451: cat stderr
 stderr:
-input:
-2.1: syntax error
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | error
-stderr:
-./calc.at:1451:  $PREPARSER ./calc  input
-2.1: syntax error
-stderr:
-1.1: syntax error
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.1: syntax error
 Starting parse
 Entering state 0
 Stack now 0
@@ -162903,7 +163321,12 @@
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (# + 1) = 1111
+./calc.at:1449:  $PREPARSER ./calc  input
+stderr:
 stderr:
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -163988,25 +164411,22 @@
 Cleanup: popping token end of file (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1449: cat stderr
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+1.2: syntax error: invalid character: '#'
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + #) = 1111
+./calc.at:1451:  $PREPARSER ./calc  input
+stderr:
+stderr:
 input:
-./calc.at:1449:  $PREPARSER ./calc  /dev/null
+1.6: syntax error: invalid character: '#'
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
   | 1 2
 ./calc.at:1455:  $PREPARSER ./calc  input
+./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
 stderr:
-1.1: syntax error
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Stack now 0
@@ -164037,10 +164457,27 @@
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1451: cat stderr
-stderr:
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-1.1: syntax error
 Starting parse
 Entering state 0
 Stack now 0
@@ -164070,7 +164507,9 @@
 Stack now 0
 Cleanup: discarding lookahead token number (1.3: 2)
 Stack now 0
-./calc.at:1449: "$PERL" -pi -e 'use strict;
+./calc.at:1449: cat stderr
+./calc.at:1451: cat stderr
+./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -164081,66 +164520,26 @@
   }eg
 ' expout || exit 77
 input:
-  | 1 = 2 = 3
+input:
+  | (# + 1) = 1111
 ./calc.at:1451:  $PREPARSER ./calc  input
-./calc.at:1455: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-1.7: syntax error
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: cat stderr
-stderr:
-1.7: syntax error
 ./calc.at:1455: cat stderr
-input:
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+  | (1 + # + 1) = 1111
 ./calc.at:1449:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
 ./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-./calc.at:1451: cat stderr
 stderr:
   | 1//2
 ./calc.at:1455:  $PREPARSER ./calc  input
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
+1.2: syntax error: invalid character: '#'
+stderr:
+1.6: syntax error: invalid character: '#'
 stderr:
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -164184,9 +164583,28 @@
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1451: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-./calc.at:1449: cat stderr
+./calc.at:1451: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -164229,11 +164647,11 @@
 Stack now 0
 Cleanup: discarding lookahead token '/' (1.3: )
 Stack now 0
+./calc.at:1449: cat stderr
 input:
-  | 
-  | +1
+  | (1 + # + 1) = 1111
 ./calc.at:1451:  $PREPARSER ./calc  input
-  | (!!) + (1 2) = 1
+input:
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -164244,33 +164662,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | (1 + 1) / (1 - 1)
 ./calc.at:1449:  $PREPARSER ./calc  input
 stderr:
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-2.1: syntax error
+1.6: syntax error: invalid character: '#'
 ./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-2.1: syntax error
 ./calc.at:1455: cat stderr
+1.11-17: error: null divisor
+./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.6: syntax error: invalid character: '#'
 stderr:
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.11: syntax error
-1.1-16: error: 2222 != 1
 input:
-./calc.at:1451: cat stderr
-./calc.at:1449: "$PERL" -pi -e 'use strict;
+./calc.at:1451: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -164280,10 +164685,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1451:  $PREPARSER ./calc  /dev/null
+1.11-17: error: null divisor
   | error
 ./calc.at:1455:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1451: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -164308,12 +164714,18 @@
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1449: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-1.1: syntax error
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.1: syntax error
+input:
 Starting parse
 Entering state 0
 Stack now 0
@@ -164337,6 +164749,14 @@
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
 Stack now 0
+  | (1 + 1) / (1 - 1)
+./calc.at:1451:  $PREPARSER ./calc  input
+./calc.at:1449: cat stderr
+stderr:
+1.11-17: error: null divisor
+./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+536. calc.at:1449:  ok
+stderr:
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -164347,6 +164767,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+1.11-17: error: null divisor
+./calc.at:1455: cat stderr
 ./calc.at:1451: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -164357,21 +164779,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1449: cat stderr
-./calc.at:1455: cat stderr
-input:
+
 input:
   | 1 = 2 = 3
 ./calc.at:1455:  $PREPARSER ./calc  input
-  | (- *) + (1 2) = 1
-./calc.at:1449:  $PREPARSER ./calc  input
-stderr:
 ./calc.at:1451: cat stderr
 stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -164421,26 +164834,9 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
+537. calc.at:1451:  ok
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1451:  $PREPARSER ./calc  input
-stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-stderr:
 stderr:
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -164490,19 +164886,6 @@
 Stack now 0
 Cleanup: discarding lookahead token '=' (1.7: )
 Stack now 0
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: cat stderr
-stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -164513,38 +164896,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+
 ./calc.at:1455: cat stderr
-  | (* *) + (*) + (*)
-./calc.at:1449:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1451: cat stderr
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-stderr:
   | 
   | +1
 ./calc.at:1455:  $PREPARSER ./calc  input
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-input:
-  | (!!) + (1 2) = 1
+546. calc.at:1476: testing Calculator C++ %glr-parser   ...
+./calc.at:1476: mv calc.y.tmp calc.y
+
+./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 stderr:
-./calc.at:1451:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -164583,21 +164945,7 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
-stderr:
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -164637,6 +164985,11 @@
 Stack now 0
 Cleanup: discarding lookahead token '+' (2.1: )
 Stack now 0
+547. calc.at:1476: testing Calculator glr2.cc   ...
+./calc.at:1476: mv calc.y.tmp calc.y
+
+./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+stderr:
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -164647,24 +165000,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
+stdout:
+./calc.at:1457: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 ./calc.at:1455: cat stderr
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1455:  $PREPARSER ./calc  /dev/null
-./calc.at:1449: cat stderr
+input:
 stderr:
-./calc.at:1451: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -164689,10 +165040,22 @@
 Cleanup: discarding lookahead token end of file (1.1: )
 Stack now 0
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
 stderr:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1449:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Stack now 0
@@ -164716,12 +165079,7 @@
 1.1: syntax error, unexpected end of file
 Cleanup: discarding lookahead token end of file (1.1: )
 Stack now 0
-stderr:
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1451:  $PREPARSER ./calc  input
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -164732,38 +165090,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-stderr:
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1476: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
 ./calc.at:1455: cat stderr
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 input:
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1449:  $PREPARSER ./calc  input
-stderr:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1455:  $PREPARSER ./calc  input
-./calc.at:1451: cat stderr
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1457:  $PREPARSER ./calc  input
+./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
 stderr:
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -165179,11 +165520,9 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1451:  $PREPARSER ./calc  input
 stderr:
 stderr:
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 Starting parse
 Entering state 0
 Stack now 0
@@ -165598,24 +165937,6 @@
 Stack now 0 6 16
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -165626,37 +165947,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1449: cat stderr
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1457: cat stderr
 ./calc.at:1455: cat stderr
-./calc.at:1451: cat stderr
 input:
 input:
-input:
-  | (#) + (#) = 2222
-./calc.at:1449:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1451:  $PREPARSER ./calc  input
+  | 1//2
+./calc.at:1457:  $PREPARSER ./calc  input
   | (!!) + (1 2) = 1
-stderr:
 ./calc.at:1455:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-stderr:
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -165817,12 +166116,9 @@
 Stack now 0 6 16
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1451:  $PREPARSER ./calc  input
+syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -165984,17 +166280,7 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -166005,30 +166291,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1457: cat stderr
 ./calc.at:1455: cat stderr
-./calc.at:1449: cat stderr
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 input:
-  | (1 + #) = 1111
 input:
-./calc.at:1449:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (- *) + (1 2) = 1
 ./calc.at:1455:  $PREPARSER ./calc  input
+  | error
+./calc.at:1457:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
 stderr:
 Starting parse
 Entering state 0
@@ -166217,17 +166488,9 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1451: cat stderr
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -166415,14 +166678,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-input:
-  | (#) + (#) = 2222
-./calc.at:1451:  $PREPARSER ./calc  input
-./calc.at:1449: cat stderr
-stderr:
-input:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
+syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -166433,34 +166689,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (# + 1) = 1111
-stderr:
-./calc.at:1449:  $PREPARSER ./calc  input
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
 ./calc.at:1455: cat stderr
-stderr:
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-1.2: syntax error: invalid character: '#'
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1457: cat stderr
+input:
 input:
-./calc.at:1451: cat stderr
-stderr:
   | (* *) + (*) + (*)
 ./calc.at:1455:  $PREPARSER ./calc  input
-1.2: syntax error: invalid character: '#'
-input:
+  | 1 = 2 = 3
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
 stderr:
+syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -166679,18 +166919,7 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + #) = 1111
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1451:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -166909,12 +167138,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: cat stderr
-stderr:
-input:
+syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -166925,28 +167149,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (1 + # + 1) = 1111
-1.6: syntax error: invalid character: '#'
-./calc.at:1449:  $PREPARSER ./calc  input
-stderr:
 ./calc.at:1455: cat stderr
+./calc.at:1457: cat stderr
 input:
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+input:
+  | 
+  | +1
+./calc.at:1457:  $PREPARSER ./calc  input
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1455:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
+syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
 Starting parse
 Entering state 0
 Stack now 0
@@ -167030,20 +167247,8 @@
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-1.6: syntax error: invalid character: '#'
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1451: cat stderr
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 Starting parse
 Entering state 0
 Stack now 0
@@ -167128,14 +167333,17 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
+./calc.at:1457: cat stderr
+./calc.at:1457:  $PREPARSER ./calc  /dev/null
+stderr:
 input:
   | 1 + 2 * 3 + !- ++
+syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1455:  $PREPARSER ./calc  input
-  | (# + 1) = 1111
-./calc.at:1451:  $PREPARSER ./calc  input
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
+syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
 Starting parse
 Entering state 0
 Stack now 0
@@ -167220,11 +167428,6 @@
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: cat stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
 stderr:
 Starting parse
 Entering state 0
@@ -167309,20 +167512,8 @@
 Stack now 0 8 20
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-  | (1 + 1) / (1 - 1)
-./calc.at:1449:  $PREPARSER ./calc  input
-1.2: syntax error: invalid character: '#'
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
+./calc.at:1457: cat stderr
+input:
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -167333,29 +167524,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.11-17: error: null divisor
-./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11-17: error: null divisor
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1457:  $PREPARSER ./calc  input
 ./calc.at:1455: cat stderr
-./calc.at:1451: cat stderr
-./calc.at:1449: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
 input:
   | (#) + (#) = 2222
 ./calc.at:1455:  $PREPARSER ./calc  input
 stderr:
-  | (1 + # + 1) = 1111
-./calc.at:1451:  $PREPARSER ./calc  input
+stderr:
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+error: 4444 != 1
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -167493,11 +167676,6 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1449: cat stderr
-stderr:
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -167633,19 +167811,11 @@
 Stack now 0 6 16
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-1.6: syntax error: invalid character: '#'
-536. calc.at:1449:  ok
-
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+error: 4444 != 1
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -167657,17 +167827,11 @@
   }eg
 ' expout || exit 77
 ./calc.at:1455: cat stderr
-./calc.at:1451: cat stderr
-input:
 input:
   | (1 + #) = 1111
 ./calc.at:1455:  $PREPARSER ./calc  input
-  | (1 + 1) / (1 - 1)
-./calc.at:1451:  $PREPARSER ./calc  input
-stderr:
-1.11-17: error: null divisor
-./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1457: cat stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -167775,8 +167939,6 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.11-17: error: null divisor
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -167882,20 +168044,6 @@
 Stack now 0 6 16
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-546. calc.at:1476: testing Calculator C++ %glr-parser   ...
-./calc.at:1476: mv calc.y.tmp calc.y
-
-./calc.at:1451: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -167906,14 +168054,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1451: cat stderr
-537. calc.at:1451:  ok
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+error: 2222 != 1
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1455: cat stderr
 input:
   | (# + 1) = 1111
 ./calc.at:1455:  $PREPARSER ./calc  input
 stderr:
-
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+error: 2222 != 1
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -168132,6 +168287,20 @@
 Stack now 0 6 16
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1457: cat stderr
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+error: 2222 != 1
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+error: 2222 != 1
+./calc.at:1457: cat stderr
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -168144,9 +168313,17 @@
 ' expout || exit 77
 ./calc.at:1455: cat stderr
 input:
+input:
+  | (* *) + (*) + (*)
+./calc.at:1457:  $PREPARSER ./calc  input
   | (1 + # + 1) = 1111
 ./calc.at:1455:  $PREPARSER ./calc  input
 stderr:
+stderr:
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -168276,6 +168453,10 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
@@ -168413,8 +168594,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1457: cat stderr
 ./calc.at:1455: cat stderr
 input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+input:
   | (1 + 1) / (1 - 1)
 ./calc.at:1455:  $PREPARSER ./calc  input
 stderr:
@@ -168569,7 +168758,12 @@
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -168720,11 +168914,7 @@
 Stack now 0 6 16
 Cleanup: popping token end of file (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-547. calc.at:1476: testing Calculator glr2.cc   ...
-./calc.at:1476: mv calc.y.tmp calc.y
-
-./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1476: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+stderr:
 ./calc.at:1455: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -168737,16 +168927,34 @@
 ' expout || exit 77
 ./calc.at:1455: cat stderr
 540. calc.at:1455:  ok
+./calc.at:1457: cat stderr
+input:
+  | (#) + (#) = 2222
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
 
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1457: cat stderr
+input:
+  | (1 + #) = 1111
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 548. calc.at:1477: testing Calculator C++ %glr-parser %locations  ...
 ./calc.at:1477: mv calc.y.tmp calc.y
 
+stderr:
 ./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-./calc.at:1477: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+syntax error: invalid character: '#'
 stderr:
 stdout:
-./calc.at:1457: "$PERL" -ne '
+./calc.at:1458: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -168757,6 +168965,11 @@
         || /\t/
         )' calc.cc
 
+./calc.at:1457: cat stderr
+input:
+  | (# + 1) = 1111
+./calc.at:1457:  $PREPARSER ./calc  input
+stderr:
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -168771,189 +168984,98 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1457:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 2
-./calc.at:1457:  $PREPARSER ./calc  input
-stderr:
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1457: cat stderr
-input:
-  | 1//2
-./calc.at:1457:  $PREPARSER ./calc  input
-stderr:
-syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1457: cat stderr
-input:
-  | error
-./calc.at:1457:  $PREPARSER ./calc  input
-stderr:
-syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+syntax error: invalid character: '#'
 ./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1458:  $PREPARSER ./calc  input
 stderr:
-syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1457: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1457:  $PREPARSER ./calc  input
 stderr:
-syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1477: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+syntax error: invalid character: '#'
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 ./calc.at:1457: cat stderr
 input:
-  | 
-  | +1
-./calc.at:1457:  $PREPARSER ./calc  input
-stderr:
-syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1457: cat stderr
-./calc.at:1457:  $PREPARSER ./calc  /dev/null
-stderr:
-syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1457: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+  | 1 2
+./calc.at:1458:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
 ./calc.at:1457:  $PREPARSER ./calc  input
 stderr:
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-error: 4444 != 1
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-error: 4444 != 1
-./calc.at:1457: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1457:  $PREPARSER ./calc  input
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 stderr:
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-error: 2222 != 1
+syntax error: invalid character: '#'
 ./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-error: 2222 != 1
-./calc.at:1457: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1457:  $PREPARSER ./calc  input
-stderr:
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-error: 2222 != 1
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-error: 2222 != 1
+syntax error: invalid character: '#'
+1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1458: cat stderr
 ./calc.at:1457: cat stderr
 input:
-  | (* *) + (*) + (*)
-./calc.at:1457:  $PREPARSER ./calc  input
-stderr:
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1457: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
+  | (1 + 1) / (1 - 1)
 ./calc.at:1457:  $PREPARSER ./calc  input
+  | 1//2
+./calc.at:1458:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1457:  $PREPARSER ./calc  input
 stderr:
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+error: null divisor
 ./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1457: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1457:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+error: null divisor
+./calc.at:1458: cat stderr
 ./calc.at:1457: cat stderr
 input:
-  | (1 + #) = 1111
-./calc.at:1457:  $PREPARSER ./calc  input
+  | error
+541. calc.at:1457:  ok
+./calc.at:1458:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1457: cat stderr
+
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1458: cat stderr
 input:
-  | (# + 1) = 1111
-./calc.at:1457:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
+./calc.at:1458:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+549. calc.at:1477: testing Calculator glr2.cc %locations  ...
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1457: cat stderr
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+./calc.at:1477: mv calc.y.tmp calc.y
+
+./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1458: cat stderr
 input:
-  | (1 + # + 1) = 1111
-./calc.at:1457:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1458:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1457: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1457:  $PREPARSER ./calc  input
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1458: cat stderr
+./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1458:  $PREPARSER ./calc  /dev/null
 stderr:
-error: null divisor
-./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-error: null divisor
-./calc.at:1457: cat stderr
-541. calc.at:1457:  ok
-
-549. calc.at:1477: testing Calculator glr2.cc %locations  ...
-./calc.at:1477: mv calc.y.tmp calc.y
-
-./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1458: cat stderr
 stderr:
 stdout:
-./calc.at:1458: "$PERL" -ne '
+./calc.at:1459: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -168965,6 +169087,8 @@
         )' calc.cc
 
 input:
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -168978,68 +169102,239 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
+./calc.at:1459:  $PREPARSER ./calc  input
 ./calc.at:1458:  $PREPARSER ./calc  input
 stderr:
+stderr:
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+stderr:
 ./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stderr:
-./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
 input:
   | 1 2
-./calc.at:1458:  $PREPARSER ./calc  input
+./calc.at:1459:  $PREPARSER ./calc  input
+./calc.at:1458: cat stderr
 stderr:
 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1458:  $PREPARSER ./calc  input
 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1458: cat stderr
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1459: cat stderr
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
 input:
   | 1//2
-./calc.at:1458:  $PREPARSER ./calc  input
+./calc.at:1459:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1458: cat stderr
 1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1458: cat stderr
 input:
-  | error
+1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+  | (- *) + (1 2) = 1
 ./calc.at:1458:  $PREPARSER ./calc  input
 stderr:
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1459: cat stderr
 ./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+  | error
+./calc.at:1459:  $PREPARSER ./calc  input
+stderr:
 1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1458: cat stderr
+1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1459: cat stderr
 input:
-  | 1 = 2 = 3
+  | (* *) + (*) + (*)
 ./calc.at:1458:  $PREPARSER ./calc  input
+input:
 stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+  | 1 = 2 = 3
+./calc.at:1459:  $PREPARSER ./calc  input
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+stderr:
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+stderr:
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+./calc.at:1459: cat stderr
 ./calc.at:1458: cat stderr
 input:
   | 
   | +1
+./calc.at:1459:  $PREPARSER ./calc  input
+input:
+stderr:
+2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1458:  $PREPARSER ./calc  input
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
 ./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1458: cat stderr
-./calc.at:1458:  $PREPARSER ./calc  /dev/null
+./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1459: cat stderr
+input:
+./calc.at:1459:  $PREPARSER ./calc  /dev/null
 stderr:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1458:  $PREPARSER ./calc  input
 1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+stderr:
+./calc.at:1459: cat stderr
+./calc.at:1458: cat stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1459:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+input:
+  | (#) + (#) = 2222
+./calc.at:1458:  $PREPARSER ./calc  input
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
+1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.1-46: error: 4444 != 1
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1459: cat stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1459:  $PREPARSER ./calc  input
+./calc.at:1458: cat stderr
+stderr:
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+input:
+  | (1 + #) = 1111
+./calc.at:1458:  $PREPARSER ./calc  input
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
+1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-16: error: 2222 != 1
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1459: cat stderr
+stderr:
+1.6: syntax error: invalid character: '#'
 ./calc.at:1458: cat stderr
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1459:  $PREPARSER ./calc  input
+input:
+stderr:
+  | (# + 1) = 1111
+./calc.at:1458:  $PREPARSER ./calc  input
+stderr:
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+1.1-17: error: 2222 != 1
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1459: cat stderr
+./calc.at:1458: cat stderr
+input:
+  | (* *) + (*) + (*)
+./calc.at:1459:  $PREPARSER ./calc  input
+stderr:
+input:
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+  | (1 + # + 1) = 1111
+./calc.at:1458:  $PREPARSER ./calc  input
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.6: syntax error: invalid character: '#'
+stderr:
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1459: cat stderr
+./calc.at:1458: cat stderr
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1459:  $PREPARSER ./calc  input
 input:
+  | (1 + 1) / (1 - 1)
+stderr:
+./calc.at:1458:  $PREPARSER ./calc  input
+stderr:
+1.11-17: error: null divisor
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr
 stdout:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
+1.11-17: error: null divisor
 ./calc.at:1468: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -169051,13 +169346,13 @@
         || /\t/
         )' calc.cc
 
-./calc.at:1458:  $PREPARSER ./calc  input
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1459:  $PREPARSER ./calc  input
+./calc.at:1458: cat stderr
 stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
+542. calc.at:1458:  ok
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -169073,35 +169368,28 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1468:  $PREPARSER ./calc  input
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
 stderr:
-./calc.at:1458: cat stderr
 input:
+./calc.at:1459: cat stderr
   | 1 2
+
 ./calc.at:1468:  $PREPARSER ./calc  input
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1458:  $PREPARSER ./calc  input
-stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
 stderr:
 syntax error
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
-stderr:
+  | (#) + (#) = 2222
+./calc.at:1459:  $PREPARSER ./calc  input
 syntax error
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169112,36 +169400,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1458: cat stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
 ./calc.at:1468: cat stderr
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1458:  $PREPARSER ./calc  input
-input:
-stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
   | 1//2
 ./calc.at:1468:  $PREPARSER ./calc  input
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
 syntax error
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1458: cat stderr
 stderr:
 syntax error
-input:
-  | (* *) + (*) + (*)
-./calc.at:1458:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169152,28 +169421,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+./calc.at:1459: cat stderr
+input:
+  | (1 + #) = 1111
+./calc.at:1459:  $PREPARSER ./calc  input
 ./calc.at:1468: cat stderr
-./calc.at:1458: cat stderr
+stderr:
 input:
   | error
 ./calc.at:1468:  $PREPARSER ./calc  input
 stderr:
-input:
 syntax error
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1458:  $PREPARSER ./calc  input
 stderr:
 syntax error
+1.6: syntax error: invalid character: '#'
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span}  ...
 stderr:
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr
+./calc.at:1478: mv calc.y.tmp calc.y
+
+./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+1.6: syntax error: invalid character: '#'
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169184,12 +169453,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1459: cat stderr
 input:
+  | (# + 1) = 1111
+./calc.at:1459:  $PREPARSER ./calc  input
+./calc.at:1468: cat stderr
+stderr:
+stderr:
 stdout:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1458:  $PREPARSER ./calc  input
-./calc.at:1459: "$PERL" -ne '
+1.2: syntax error: invalid character: '#'
+./calc.at:1476: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -169200,9 +169473,13 @@
         || /\t/
         )' calc.cc
 
-./calc.at:1468: cat stderr
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 = 2 = 3
+./calc.at:1468:  $PREPARSER ./calc  input
 stderr:
 input:
+stderr:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -169216,22 +169493,24 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1459:  $PREPARSER ./calc  input
-input:
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 = 2 = 3
-./calc.at:1468:  $PREPARSER ./calc  input
-stderr:
-stderr:
+./calc.at:1476:  $PREPARSER ./calc  input
+1.2: syntax error: invalid character: '#'
 syntax error
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
+stderr:
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
 stderr:
+./calc.at:1459: cat stderr
+input:
+  | 1 2
+./calc.at:1476:  $PREPARSER ./calc  input
+input:
 stderr:
-./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-./calc.at:1458: cat stderr
+  | (1 + # + 1) = 1111
+./calc.at:1459:  $PREPARSER ./calc  input
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169242,48 +169521,51 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (#) + (#) = 2222
-./calc.at:1458:  $PREPARSER ./calc  input
-input:
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-  | 1 2
-./calc.at:1459:  $PREPARSER ./calc  input
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1468: cat stderr
+syntax error
 stderr:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.6: syntax error: invalid character: '#'
+./calc.at:1468: cat stderr
+./calc.at:1476: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
-./calc.at:1458: cat stderr
+./calc.at:1459: cat stderr
   | 
   | +1
 ./calc.at:1468:  $PREPARSER ./calc  input
 stderr:
-stderr:
-1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
 syntax error
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1459:  $PREPARSER ./calc  input
 stderr:
-  | (1 + #) = 1111
-./calc.at:1458:  $PREPARSER ./calc  input
-./calc.at:1459: cat stderr
+./calc.at:1476: cat stderr
 syntax error
 stderr:
-1.6: syntax error: invalid character: '#'
+1.11-17: error: null divisor
+./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1//2
-./calc.at:1459:  $PREPARSER ./calc  input
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
+1.11-17: error: null divisor
 stderr:
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169294,34 +169576,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1458: cat stderr
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
 stderr:
+syntax error
+./calc.at:1459: cat stderr
 ./calc.at:1468: cat stderr
-  | (# + 1) = 1111
-1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1458:  $PREPARSER ./calc  input
+543. calc.at:1459:  ok
 ./calc.at:1468:  $PREPARSER ./calc  /dev/null
+./calc.at:1476: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1459: cat stderr
+./calc.at:1476: cat stderr
 syntax error
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+input:
 stderr:
+  | error
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
 syntax error
-input:
-  | error
-./calc.at:1459:  $PREPARSER ./calc  input
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1458: cat stderr
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169332,21 +169617,29 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1458:  $PREPARSER ./calc  input
-1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-stderr:
-1.6: syntax error: invalid character: '#'
+./calc.at:1476: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1468: cat stderr
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
+./calc.at:1476: cat stderr
+551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span}  ...
+./calc.at:1478: mv calc.y.tmp calc.y
+
+./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+input:
 input:
-./calc.at:1459: cat stderr
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1468:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
+./calc.at:1476:  $PREPARSER ./calc  input
+stderr:
 stderr:
 syntax error
 syntax error
@@ -169354,26 +169647,16 @@
 syntax error
 error: 4444 != 1
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1459:  $PREPARSER ./calc  input
-./calc.at:1458: cat stderr
-stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
 syntax error
 syntax error
 syntax error
 error: 4444 != 1
 stderr:
-1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1458:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1459: cat stderr
+syntax error
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169384,40 +169667,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.11-17: error: null divisor
-input:
-  | 
-  | +1
-./calc.at:1459:  $PREPARSER ./calc  input
-./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-stderr:
-./calc.at:1468: cat stderr
-1.11-17: error: null divisor
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-  | (!!) + (1 2) = 1
-./calc.at:1468:  $PREPARSER ./calc  input
-./calc.at:1458: cat stderr
-./calc.at:1459: cat stderr
-stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1459:  $PREPARSER ./calc  /dev/null
-stderr:
-542. calc.at:1458:  ok
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-stderr:
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error
-error: 2222 != 1
-stderr:
-
-./calc.at:1468: "$PERL" -pi -e 'use strict;
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169427,37 +169677,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+./calc.at:1476: cat stderr
 ./calc.at:1468: cat stderr
+./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 input:
-  | (- *) + (1 2) = 1
+input:
+  | (!!) + (1 2) = 1
 ./calc.at:1468:  $PREPARSER ./calc  input
-./calc.at:1459: cat stderr
+  | 
+  | +1
 stderr:
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1459:  $PREPARSER ./calc  input
-syntax error
+./calc.at:1476:  $PREPARSER ./calc  input
 syntax error
 error: 2222 != 1
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 syntax error
 error: 2222 != 1
 stderr:
-1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.1-46: error: 4444 != 1
+syntax error
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169468,52 +169709,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1459: cat stderr
-550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span}  ...
-input:
-./calc.at:1478: mv calc.y.tmp calc.y
-
+./calc.at:1476: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1468: cat stderr
-  | (!!) + (1 2) = 1
-./calc.at:1459:  $PREPARSER ./calc  input
-./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-stderr:
-input:
-  | (* *) + (*) + (*)
-./calc.at:1468:  $PREPARSER ./calc  input
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1476: cat stderr
+./calc.at:1476:  $PREPARSER ./calc  /dev/null
 stderr:
-1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-16: error: 2222 != 1
-./calc.at:1459: cat stderr
 input:
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (- *) + (1 2) = 1
-./calc.at:1459:  $PREPARSER ./calc  input
+./calc.at:1468:  $PREPARSER ./calc  input
 stderr:
 stderr:
 syntax error
 syntax error
 syntax error
+error: 2222 != 1
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-syntax error
 syntax error
 syntax error
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-1.1-17: error: 2222 != 1
-./calc.at:1459: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1459:  $PREPARSER ./calc  input
-./calc.at:1468: "$PERL" -pi -e 'use strict;
+error: 2222 != 1
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169523,41 +169749,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1468: cat stderr
-stderr:
-1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1468:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1459: cat stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1468:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1459:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1459:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169568,31 +169759,36 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1476: cat stderr
 ./calc.at:1468: cat stderr
-./calc.at:1459: cat stderr
 input:
-./calc.at:1478: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-  | (#) + (#) = 2222
 input:
-./calc.at:1459:  $PREPARSER ./calc  input
-  | (#) + (#) = 2222
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1476:  $PREPARSER ./calc  input
+  | (* *) + (*) + (*)
 ./calc.at:1468:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
+syntax error
+syntax error
+syntax error
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1459: cat stderr
+syntax error
+syntax error
+syntax error
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169603,35 +169799,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | (1 + #) = 1111
-./calc.at:1459:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1468: cat stderr
-./calc.at:1459: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1459:  $PREPARSER ./calc  input
-input:
-  | (1 + #) = 1111
-./calc.at:1468:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-stderr:
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error: invalid character: '#'
-./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
-stderr:
-./calc.at:1459: cat stderr
-syntax error: invalid character: '#'
-input:
-./calc.at:1468: "$PERL" -pi -e 'use strict;
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169641,53 +169809,31 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | (1 + # + 1) = 1111
-./calc.at:1459:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
 ./calc.at:1468: cat stderr
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (# + 1) = 1111
-stderr:
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1468:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
-./calc.at:1459: cat stderr
+./calc.at:1476: cat stderr
 stderr:
-syntax error: invalid character: '#'
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
 input:
-./calc.at:1468: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | (1 + 1) / (1 - 1)
-./calc.at:1459:  $PREPARSER ./calc  input
-./calc.at:1468: cat stderr
 stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1476:  $PREPARSER ./calc  input
+stderr:
+syntax error
+error: 2222 != 1
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-1.11-17: error: null divisor
-  | (1 + # + 1) = 1111
-./calc.at:1468:  $PREPARSER ./calc  input
-./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1468:  $PREPARSER ./calc  input
+syntax error
+error: 2222 != 1
 stderr:
-1.11-17: error: null divisor
-syntax error: invalid character: '#'
-./calc.at:1459: cat stderr
 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-543. calc.at:1459:  ok
-./calc.at:1468: "$PERL" -pi -e 'use strict;
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169697,19 +169843,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-
-./calc.at:1468: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1468:  $PREPARSER ./calc  input
-stderr:
-error: null divisor
-./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-error: null divisor
-551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span}  ...
-./calc.at:1478: mv calc.y.tmp calc.y
-
+./calc.at:1476: cat stderr
 ./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169720,55 +169854,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1468: cat stderr
-544. calc.at:1468:  ok
-
-./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose  ...
-./calc.at:1479: mv calc.y.tmp calc.y
-
-./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1479: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-stderr:
-stdout:
-./calc.at:1476: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1476:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 input:
-  | 1 2
+  | (- *) + (1 2) = 1
 ./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
 syntax error
+syntax error
+error: 2222 != 1
 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1468: cat stderr
 syntax error
+syntax error
+error: 2222 != 1
+input:
+  | (#) + (#) = 2222
+./calc.at:1468:  $PREPARSER ./calc  input
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169779,16 +169880,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1476: cat stderr
+stderr:
 input:
-  | 1//2
+  | (* *) + (*) + (*)
 ./calc.at:1476:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+stdout:
+./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169798,10 +169903,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: cat stderr
 stderr:
-stdout:
-input:
 ./calc.at:1477: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -169813,14 +169915,11 @@
         || /\t/
         )' calc.cc
 
-  | error
-./calc.at:1476:  $PREPARSER ./calc  input
-stderr:
+syntax error
+syntax error
 syntax error
 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-stderr:
-syntax error
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -169835,6 +169934,19 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1477:  $PREPARSER ./calc  input
+stderr:
+syntax error
+syntax error
+syntax error
+stderr:
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1468: cat stderr
+stderr:
+input:
+  | 1 2
+./calc.at:1477:  $PREPARSER ./calc  input
+input:
+stderr:
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169845,25 +169957,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./calc.at:1476: cat stderr
+  | (1 + #) = 1111
+./calc.at:1468:  $PREPARSER ./calc  input
+1.3: syntax error
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
-input:
-  | 1 = 2 = 3
-./calc.at:1476:  $PREPARSER ./calc  input
-stderr:
-  | 1 2
-./calc.at:1477:  $PREPARSER ./calc  input
-syntax error
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+syntax error: invalid character: '#'
+./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
 1.3: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1476: cat stderr
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169873,14 +169979,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./calc.at:1476: cat stderr
-1.3: syntax error
 input:
-  | 
-  | +1
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1476:  $PREPARSER ./calc  input
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+stderr:
+./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169890,17 +169993,33 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-syntax error
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1477: cat stderr
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
 input:
+./calc.at:1468: cat stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1476:  $PREPARSER ./calc  input
+input:
+stderr:
   | 1//2
 ./calc.at:1477:  $PREPARSER ./calc  input
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+input:
+1.3: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (# + 1) = 1111
+./calc.at:1468:  $PREPARSER ./calc  input
+stderr:
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.3: syntax error
+syntax error: invalid character: '#'
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169910,15 +170029,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.3: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1476: cat stderr
-stderr:
-./calc.at:1476:  $PREPARSER ./calc  /dev/null
-stderr:
-1.3: syntax error
-syntax error
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169928,11 +170039,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./calc.at:1477: cat stderr
-syntax error
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+./calc.at:1476: cat stderr
+./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -169943,24 +170052,28 @@
   }eg
 ' expout || exit 77
 input:
+input:
   | error
 ./calc.at:1477:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1476:  $PREPARSER ./calc  input
+./calc.at:1468: cat stderr
+stderr:
 stderr:
-./calc.at:1476: cat stderr
 1.1: syntax error
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
 input:
 1.1: syntax error
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1476:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
+./calc.at:1468:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 stderr:
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169971,16 +170084,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error: invalid character: '#'
+./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1477: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1477:  $PREPARSER ./calc  input
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -169991,13 +170097,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.7: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.7: syntax error
+syntax error: invalid character: '#'
+./calc.at:1477: cat stderr
 ./calc.at:1476: cat stderr
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+./calc.at:1468: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170008,21 +170111,40 @@
   }eg
 ' expout || exit 77
 input:
-  | (!!) + (1 2) = 1
+  | 1 = 2 = 3
+./calc.at:1477:  $PREPARSER ./calc  input
+input:
+  | (1 + #) = 1111
+stderr:
 ./calc.at:1476:  $PREPARSER ./calc  input
+1.7: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1468: cat stderr
 stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
 stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1477: cat stderr
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.7: syntax error
 input:
-  | 
-  | +1
-./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1468:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
+stderr:
+./calc.at:1477: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+error: null divisor
+./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+error: null divisor
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170033,20 +170155,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-2.1: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1477: cat stderr
+./calc.at:1468: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
 ./calc.at:1476: cat stderr
+  | 
+  | +1
+./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
 2.1: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1468: cat stderr
 input:
-  | (- *) + (1 2) = 1
+  | (# + 1) = 1111
 ./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
-syntax error
-syntax error
-error: 2222 != 1
+544. calc.at:1468:  ok
+stderr:
+2.1: syntax error
+syntax error: invalid character: '#'
 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+syntax error: invalid character: '#'
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170057,12 +170196,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1477: cat stderr
-./calc.at:1477:  $PREPARSER ./calc  /dev/null
-stderr:
+
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170073,19 +170207,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1477: cat stderr
+./calc.at:1477:  $PREPARSER ./calc  /dev/null
+stderr:
 1.1: syntax error
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1476: cat stderr
+1.1: syntax error
 input:
-  | (* *) + (*) + (*)
+  | (1 + # + 1) = 1111
 ./calc.at:1476:  $PREPARSER ./calc  input
-stderr:
-1.1: syntax error
-stderr:
-syntax error
-syntax error
-syntax error
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170097,11 +170229,25 @@
   }eg
 ' expout || exit 77
 stderr:
-syntax error
-syntax error
-syntax error
+552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose  ...
+./calc.at:1479: mv calc.y.tmp calc.y
+
+syntax error: invalid character: '#'
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+stderr:
+syntax error: invalid character: '#'
 ./calc.at:1477: cat stderr
 input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1477:  $PREPARSER ./calc  input
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170112,25 +170258,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
 1.2: syntax error
 1.18: syntax error
 1.23: syntax error
 1.41: syntax error
 1.1-46: error: 4444 != 1
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1476: cat stderr
-stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1476:  $PREPARSER ./calc  input
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170141,27 +170275,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
+error: null divisor
 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1479: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
 ./calc.at:1477: cat stderr
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
+error: null divisor
 input:
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (!!) + (1 2) = 1
 ./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
-stderr:
 1.11: syntax error
 1.1-16: error: 2222 != 1
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170171,7 +170302,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+stderr:
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170181,27 +170315,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
 ./calc.at:1476: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1476:  $PREPARSER ./calc  input
+./calc.at:1477: cat stderr
+546. calc.at:1476:  ok
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1477:  $PREPARSER ./calc  input
+
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-stderr:
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.4: syntax error
 1.12: syntax error
 1.1-17: error: 2222 != 1
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
 1.4: syntax error
 1.12: syntax error
 1.1-17: error: 2222 != 1
@@ -170215,38 +170341,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1477: cat stderr
 input:
   | (* *) + (*) + (*)
+553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose  ...
 ./calc.at:1477:  $PREPARSER ./calc  input
-./calc.at:1476: cat stderr
+./calc.at:1479: mv calc.y.tmp calc.y
+
 stderr:
 1.2: syntax error
 1.10: syntax error
 1.16: syntax error
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | (1 + #) = 1111
-./calc.at:1476:  $PREPARSER ./calc  input
+./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 stderr:
 1.2: syntax error
 1.10: syntax error
 1.16: syntax error
-syntax error: invalid character: '#'
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170258,37 +170369,32 @@
   }eg
 ' expout || exit 77
 ./calc.at:1477: cat stderr
-./calc.at:1476: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 input:
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1477:  $PREPARSER ./calc  input
-./calc.at:1476: cat stderr
 stderr:
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-stderr:
-  | (# + 1) = 1111
-./calc.at:1476:  $PREPARSER ./calc  input
+./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
 stderr:
 input:
-syntax error: invalid character: '#'
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 + 2 * 3 + !- ++
 ./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
 stderr:
+stdout:
+./calc.at:1469: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170299,36 +170405,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1476: cat stderr
-./calc.at:1477: cat stderr
 input:
-  | (1 + # + 1) = 1111
-./calc.at:1476:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1469:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1477: cat stderr
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | (#) + (#) = 2222
 ./calc.at:1477:  $PREPARSER ./calc  input
-stderr:
+input:
+  | 1 2
+./calc.at:1469:  $PREPARSER ./calc  input
 stderr:
 1.2: syntax error: invalid character: '#'
 1.8: syntax error: invalid character: '#'
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error: invalid character: '#'
+stderr:
 stderr:
 1.2: syntax error: invalid character: '#'
 1.8: syntax error: invalid character: '#'
+syntax error
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170339,7 +170451,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1477: cat stderr
+./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170349,22 +170463,52 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
+stderr:
+stdout:
 input:
   | (1 + #) = 1111
 ./calc.at:1477:  $PREPARSER ./calc  input
-./calc.at:1476: cat stderr
+./calc.at:1476: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 stderr:
 1.6: syntax error: invalid character: '#'
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1469: cat stderr
 input:
 stderr:
+  | 1//2
+./calc.at:1469:  $PREPARSER ./calc  input
+stderr:
 1.6: syntax error: invalid character: '#'
-  | (1 + 1) / (1 - 1)
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
 ./calc.at:1476:  $PREPARSER ./calc  input
+syntax error
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error
 stderr:
-error: null divisor
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170375,10 +170519,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-error: null divisor
-./calc.at:1477: cat stderr
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170389,16 +170532,41 @@
   }eg
 ' expout || exit 77
 input:
+  | 1 2
+./calc.at:1476:  $PREPARSER ./calc  input
+./calc.at:1477: cat stderr
+stderr:
+./calc.at:1469: cat stderr
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | error
+./calc.at:1469:  $PREPARSER ./calc  input
+stderr:
+input:
   | (# + 1) = 1111
 ./calc.at:1477:  $PREPARSER ./calc  input
-./calc.at:1476: cat stderr
 stderr:
+stderr:
+syntax error
+syntax error
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.2: syntax error: invalid character: '#'
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-546. calc.at:1476:  ok
+./calc.at:1476: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 stderr:
 1.2: syntax error: invalid character: '#'
-
+syntax error
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170409,16 +170577,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1476: cat stderr
+./calc.at:1469: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | 1//2
+./calc.at:1476:  $PREPARSER ./calc  input
 ./calc.at:1477: cat stderr
+stderr:
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1469: cat stderr
+stderr:
 input:
+syntax error
   | (1 + # + 1) = 1111
 ./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
+input:
 1.6: syntax error: invalid character: '#'
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1469:  $PREPARSER ./calc  input
+stderr:
 stderr:
 1.6: syntax error: invalid character: '#'
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170428,19 +170622,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose  ...
-./calc.at:1479: mv calc.y.tmp calc.y
-
-./calc.at:1477: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1477:  $PREPARSER ./calc  input
-./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 stderr:
-1.11-17: error: null divisor
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11-17: error: null divisor
+syntax error
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170451,18 +170634,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
-548. calc.at:1477:  ok
-
-./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose  ...
-./calc.at:1480: mv calc.y.tmp calc.y
-
-./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1480: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1476: cat stderr
 stderr:
 stdout:
-./calc.at:1469: "$PERL" -ne '
+./calc.at:1478: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -170474,6 +170649,25 @@
         )' calc.cc
 
 input:
+./calc.at:1477: cat stderr
+./calc.at:1469: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | error
+./calc.at:1476:  $PREPARSER ./calc  input
+input:
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1477:  $PREPARSER ./calc  input
+stderr:
+stderr:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -170487,19 +170681,20 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1469:  $PREPARSER ./calc  input
+./calc.at:1478:  $PREPARSER ./calc  input
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
-  | 1 2
-./calc.at:1469:  $PREPARSER ./calc  input
+1.11-17: error: null divisor
 stderr:
-syntax error
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
-./calc.at:1469: "$PERL" -pi -e 'use strict;
+./calc.at:1469: cat stderr
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170509,16 +170704,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
 input:
-  | 1//2
+  | 1 2
+./calc.at:1478:  $PREPARSER ./calc  input
+stderr:
+1.3: syntax error
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 
+  | +1
 ./calc.at:1469:  $PREPARSER ./calc  input
 stderr:
-syntax error
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1477: cat stderr
 stderr:
-syntax error
-./calc.at:1469: "$PERL" -pi -e 'use strict;
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170528,16 +170727,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
-input:
-  | error
-./calc.at:1469:  $PREPARSER ./calc  input
-stderr:
+1.3: syntax error
 syntax error
 ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+548. calc.at:1477:  ok
 stderr:
 syntax error
-./calc.at:1469: "$PERL" -pi -e 'use strict;
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170547,27 +170743,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1469:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-stderr:
-stdout:
-./calc.at:1478: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
+./calc.at:1476: cat stderr
 
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -170580,42 +170756,25 @@
   }eg
 ' expout || exit 77
 input:
-./calc.at:1469: cat stderr
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1478:  $PREPARSER ./calc  input
-input:
-stderr:
-  | 
-  | +1
-./calc.at:1469:  $PREPARSER ./calc  input
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: cat stderr
 stderr:
 syntax error
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1469: cat stderr
 input:
-  | 1 2
+  | 1//2
 ./calc.at:1478:  $PREPARSER ./calc  input
+./calc.at:1469:  $PREPARSER ./calc  /dev/null
 stderr:
 stderr:
-syntax error
 1.3: syntax error
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error
-./calc.at:1469: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170625,6 +170784,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.3: syntax error
+syntax error
+./calc.at:1476: cat stderr
+554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose  ...
+./calc.at:1480: mv calc.y.tmp calc.y
+
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170635,22 +170803,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
-./calc.at:1469:  $PREPARSER ./calc  /dev/null
-./calc.at:1478: cat stderr
-stderr:
-syntax error
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+  | 
+  | +1
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
-  | 1//2
-syntax error
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
-1.3: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170661,7 +170819,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: cat stderr
+stderr:
+./calc.at:1469: cat stderr
+syntax error
+input:
+  | error
+./calc.at:1478:  $PREPARSER ./calc  input
+stderr:
+input:
+1.1: syntax error
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1469:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170671,13 +170845,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
-./calc.at:1478: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1469:  $PREPARSER ./calc  input
-input:
-  | error
 stderr:
 syntax error
 syntax error
@@ -170685,19 +170852,15 @@
 syntax error
 error: 4444 != 1
 ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
+1.1: syntax error
 stderr:
 syntax error
 syntax error
 syntax error
 syntax error
 error: 4444 != 1
-1.1: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.1: syntax error
-./calc.at:1469: "$PERL" -pi -e 'use strict;
+./calc.at:1476: cat stderr
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170707,7 +170870,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1476:  $PREPARSER ./calc  /dev/null
+stderr:
+./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -170717,26 +170882,39 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 ./calc.at:1478: cat stderr
-input:
+stderr:
+syntax error
 input:
   | 1 = 2 = 3
 ./calc.at:1478:  $PREPARSER ./calc  input
-  | (!!) + (1 2) = 1
-./calc.at:1469:  $PREPARSER ./calc  input
+./calc.at:1469: cat stderr
 stderr:
 1.7: syntax error
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1476: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-syntax error
-error: 2222 != 1
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
+input:
 1.7: syntax error
+  | (!!) + (1 2) = 1
+./calc.at:1469:  $PREPARSER ./calc  input
+stderr:
 syntax error
 error: 2222 != 1
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1476: cat stderr
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170747,6 +170925,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+syntax error
+error: 2222 != 1
+input:
+./calc.at:1478: cat stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1476:  $PREPARSER ./calc  input
+stderr:
+input:
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170757,25 +170944,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
-./calc.at:1469: cat stderr
-input:
+syntax error
+syntax error
+syntax error
+syntax error
+error: 4444 != 1
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 
   | +1
 ./calc.at:1478:  $PREPARSER ./calc  input
-input:
 stderr:
-  | (- *) + (1 2) = 1
 2.1: syntax error
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1469:  $PREPARSER ./calc  input
-stderr:
 stderr:
-2.1: syntax error
 syntax error
 syntax error
-error: 2222 != 1
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+error: 4444 != 1
+stderr:
+./calc.at:1469: cat stderr
+2.1: syntax error
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1469:  $PREPARSER ./calc  input
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170786,11 +170978,31 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1476: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+syntax error
+syntax error
+error: 2222 != 1
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
 syntax error
 error: 2222 != 1
+./calc.at:1476: cat stderr
 ./calc.at:1478: cat stderr
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1476:  $PREPARSER ./calc  input
+./calc.at:1478:  $PREPARSER ./calc  /dev/null
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170801,14 +171013,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478:  $PREPARSER ./calc  /dev/null
+stderr:
+syntax error
+error: 2222 != 1
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.1: syntax error
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+syntax error
+error: 2222 != 1
+stderr:
 ./calc.at:1469: cat stderr
 1.1: syntax error
 input:
+./calc.at:1476: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | (* *) + (*) + (*)
 ./calc.at:1469:  $PREPARSER ./calc  input
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
@@ -170822,45 +171050,19 @@
   }eg
 ' expout || exit 77
 stderr:
-stderr:
 syntax error
 syntax error
 syntax error
 ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./calc.at:1476: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
 stderr:
 syntax error
 syntax error
 syntax error
+./calc.at:1476: cat stderr
 ./calc.at:1478: cat stderr
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
+  | (- *) + (1 2) = 1
 ./calc.at:1476:  $PREPARSER ./calc  input
-stderr:
-input:
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170871,6 +171073,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+input:
+syntax error
+syntax error
+error: 2222 != 1
 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1478:  $PREPARSER ./calc  input
@@ -170882,24 +171089,19 @@
 1.41: syntax error
 1.1-46: error: 4444 != 1
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
+syntax error
+syntax error
+error: 2222 != 1
 ./calc.at:1469: cat stderr
-  | 1 2
-./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
 1.2: syntax error
 1.18: syntax error
 1.23: syntax error
 1.41: syntax error
 1.1-46: error: 4444 != 1
-stderr:
 input:
-syntax error
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1469:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170911,16 +171113,6 @@
   }eg
 ' expout || exit 77
 stderr:
-syntax error
-stderr:
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1469:  $PREPARSER ./calc  input
-./calc.at:1478: cat stderr
-stderr:
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170931,20 +171123,38 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1478: cat stderr
+./calc.at:1476: cat stderr
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1469:  $PREPARSER ./calc  input
+stderr:
+input:
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (!!) + (1 2) = 1
 ./calc.at:1478:  $PREPARSER ./calc  input
-./calc.at:1476: cat stderr
+input:
+  | (* *) + (*) + (*)
+stderr:
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
 1.11: syntax error
 1.1-16: error: 2222 != 1
-input:
+stderr:
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+syntax error
+syntax error
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
-  | 1//2
-./calc.at:1476:  $PREPARSER ./calc  input
 1.11: syntax error
 1.1-16: error: 2222 != 1
-stderr:
+syntax error
+syntax error
+syntax error
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170955,9 +171165,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170968,8 +171175,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
-./calc.at:1469: cat stderr
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -170980,33 +171185,36 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1469: cat stderr
 ./calc.at:1478: cat stderr
 input:
+./calc.at:1476: cat stderr
   | (#) + (#) = 2222
 ./calc.at:1469:  $PREPARSER ./calc  input
+input:
 stderr:
+  | (- *) + (1 2) = 1
+./calc.at:1478:  $PREPARSER ./calc  input
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
 ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1478:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 + !+ ++
 stderr:
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
 1.4: syntax error
 1.12: syntax error
 1.1-17: error: 2222 != 1
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 syntax error: invalid character: '#'
 syntax error: invalid character: '#'
-./calc.at:1476: cat stderr
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 1.4: syntax error
 1.12: syntax error
 1.1-17: error: 2222 != 1
-input:
-  | error
-./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -171018,7 +171226,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error
+input:
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171029,11 +171237,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | 1 + 2 * 3 + !- ++
+./calc.at:1476:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error
-./calc.at:1478: cat stderr
 ./calc.at:1469: cat stderr
+./calc.at:1478: cat stderr
+input:
+  | (1 + #) = 1111
+./calc.at:1469:  $PREPARSER ./calc  input
+input:
+  | (* *) + (*) + (*)
+./calc.at:1478:  $PREPARSER ./calc  input
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171044,33 +171260,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-input:
-  | (* *) + (*) + (*)
-./calc.at:1478:  $PREPARSER ./calc  input
-  | (1 + #) = 1111
-./calc.at:1469:  $PREPARSER ./calc  input
 stderr:
 stderr:
+syntax error: invalid character: '#'
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.2: syntax error
 1.10: syntax error
 1.16: syntax error
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error: invalid character: '#'
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1476: cat stderr
 stderr:
+syntax error: invalid character: '#'
 stderr:
 1.2: syntax error
 1.10: syntax error
 1.16: syntax error
-syntax error: invalid character: '#'
-input:
-  | 1 = 2 = 3
-./calc.at:1476:  $PREPARSER ./calc  input
-stderr:
-syntax error
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1476: cat stderr
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171081,8 +171285,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-syntax error
+input:
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171093,7 +171296,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | (#) + (#) = 2222
+./calc.at:1476:  $PREPARSER ./calc  input
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1478: cat stderr
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1469: cat stderr
+input:
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1478:  $PREPARSER ./calc  input
+  | (# + 1) = 1111
+./calc.at:1469:  $PREPARSER ./calc  input
+stderr:
+stderr:
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171104,37 +171326,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1478:  $PREPARSER ./calc  input
-./calc.at:1476: cat stderr
-stderr:
-input:
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (# + 1) = 1111
-./calc.at:1469:  $PREPARSER ./calc  input
-input:
-stderr:
-stderr:
-  | 
-  | +1
-./calc.at:1476:  $PREPARSER ./calc  input
 syntax error: invalid character: '#'
 ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-syntax error
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 input:
 syntax error: invalid character: '#'
   | 1 + 2 * 3 + !- ++
 ./calc.at:1478:  $PREPARSER ./calc  input
-syntax error
+./calc.at:1476: cat stderr
 stderr:
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
 ./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171145,16 +171349,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+  | (1 + #) = 1111
+./calc.at:1476:  $PREPARSER ./calc  input
+./calc.at:1469: cat stderr
+stderr:
+input:
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171165,30 +171364,59 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
-input:
+syntax error: invalid character: '#'
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error: invalid character: '#'
   | (1 + # + 1) = 1111
 ./calc.at:1469:  $PREPARSER ./calc  input
-./calc.at:1478: cat stderr
-./calc.at:1476: cat stderr
 stderr:
+stdout:
+./calc.at:1477: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
+stderr:
+./calc.at:1478: cat stderr
 syntax error: invalid character: '#'
-./calc.at:1476:  $PREPARSER ./calc  /dev/null
 ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
 input:
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (#) + (#) = 2222
 ./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
+stderr:
 1.2: syntax error: invalid character: '#'
 1.8: syntax error: invalid character: '#'
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error
+stderr:
+stderr:
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171199,7 +171427,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: "$PERL" -pi -e 'use strict;
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171209,10 +171437,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+input:
+  | 1 2
+./calc.at:1477:  $PREPARSER ./calc  input
+./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171222,53 +171450,31 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1469: cat stderr
 ./calc.at:1476: cat stderr
 ./calc.at:1478: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1469:  $PREPARSER ./calc  input
 stderr:
-error: null divisor
-./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-input:
-stderr:
   | (1 + #) = 1111
 ./calc.at:1478:  $PREPARSER ./calc  input
-error: null divisor
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-stderr:
-./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
+1.3: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 1.6: syntax error: invalid character: '#'
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1469: cat stderr
+input:
+1.3: syntax error
+  | (# + 1) = 1111
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1469: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-syntax error
-syntax error
-syntax error
-syntax error
-error: 4444 != 1
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1469:  $PREPARSER ./calc  input
 1.6: syntax error: invalid character: '#'
-./calc.at:1469: cat stderr
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+stderr:
+stderr:
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171278,7 +171484,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-545. calc.at:1469:  ok
+syntax error: invalid character: '#'
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: null divisor
+./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171289,28 +171499,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: cat stderr
+stderr:
+syntax error: invalid character: '#'
+error: null divisor
+./calc.at:1477: cat stderr
 ./calc.at:1478: cat stderr
-
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1476:  $PREPARSER ./calc  input
-input:
-stderr:
-  | (# + 1) = 1111
-./calc.at:1478:  $PREPARSER ./calc  input
-syntax error
-error: 2222 != 1
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error
-error: 2222 != 1
-stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+./calc.at:1469: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171320,7 +171515,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171330,32 +171525,42 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
-./calc.at:1476: cat stderr
-555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose  ...
-./calc.at:1480: mv calc.y.tmp calc.y
-
-./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-input:
+  | 1//2
+./calc.at:1477:  $PREPARSER ./calc  input
 input:
-  | (1 + # + 1) = 1111
+stderr:
+1.3: syntax error
+  | (# + 1) = 1111
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1478:  $PREPARSER ./calc  input
-  | (- *) + (1 2) = 1
 stderr:
-./calc.at:1476:  $PREPARSER ./calc  input
-1.6: syntax error: invalid character: '#'
 stderr:
+./calc.at:1476: cat stderr
+1.3: syntax error
+1.2: syntax error: invalid character: '#'
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error
-syntax error
-error: 2222 != 1
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1469: cat stderr
+input:
+545. calc.at:1469:  ok
+  | (1 + # + 1) = 1111
+./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
 stderr:
-syntax error
-syntax error
-error: 2222 != 1
+1.2: syntax error: invalid character: '#'
+./calc.at:1477: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error: invalid character: '#'
+./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+stderr:
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171366,7 +171571,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error: invalid character: '#'
+./calc.at:1477: cat stderr
+input:
 ./calc.at:1478: cat stderr
+  | error
+./calc.at:1477:  $PREPARSER ./calc  input
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171377,19 +171587,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+1.1: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | (1 + 1) / (1 - 1)
+stderr:
+  | (1 + # + 1) = 1111
 ./calc.at:1478:  $PREPARSER ./calc  input
-./calc.at:1476: cat stderr
+1.1: syntax error
 stderr:
-1.11-17: error: null divisor
+./calc.at:1476: cat stderr
+1.6: syntax error: invalid character: '#'
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose  ...
+./calc.at:1480: mv calc.y.tmp calc.y
+
+./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 input:
-stderr:
-1.11-17: error: null divisor
-  | (* *) + (*) + (*)
+  | (1 + 1) / (1 - 1)
 ./calc.at:1476:  $PREPARSER ./calc  input
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171400,19 +171617,14 @@
   }eg
 ' expout || exit 77
 stderr:
-syntax error
-syntax error
-syntax error
+stderr:
+error: null divisor
 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1478: cat stderr
+1.6: syntax error: invalid character: '#'
 stderr:
-./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-syntax error
-syntax error
-syntax error
-550. calc.at:1478:  ok
-
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+./calc.at:1477: cat stderr
+error: null divisor
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171422,23 +171634,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1476:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1476:  $PREPARSER ./calc  input
-556. calc.at:1482: testing Calculator C++ %glr-parser %debug  ...
-./calc.at:1482: mv calc.y.tmp calc.y
-
-stderr:
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+  | 1 = 2 = 3
+./calc.at:1477:  $PREPARSER ./calc  input
 ./calc.at:1476: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171449,18 +171647,40 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+1.7: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: cat stderr
+stderr:
+1.7: syntax error
 ./calc.at:1476: cat stderr
 input:
-  | (#) + (#) = 2222
-./calc.at:1476:  $PREPARSER ./calc  input
+  | (1 + 1) / (1 - 1)
+./calc.at:1478:  $PREPARSER ./calc  input
+./calc.at:1477: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+547. calc.at:1476:  ok
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+./calc.at:1477: cat stderr
+1.11-17: error: null divisor
+
+input:
+  | 
+  | +1
+./calc.at:1477:  $PREPARSER ./calc  input
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171470,16 +171690,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1476:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+2.1: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+2.1: syntax error
+./calc.at:1478: cat stderr
+550. calc.at:1478:  ok
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171489,17 +171707,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1476:  $PREPARSER ./calc  input
-./calc.at:1482: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1477: cat stderr
+
+556. calc.at:1482: testing Calculator C++ %glr-parser %debug  ...
+./calc.at:1477:  $PREPARSER ./calc  /dev/null
+./calc.at:1482: mv calc.y.tmp calc.y
+
+./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.1: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+1.1: syntax error
+557. calc.at:1482: testing Calculator glr2.cc %debug  ...
+./calc.at:1482: mv calc.y.tmp calc.y
+
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171509,16 +171732,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: cat stderr
+./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1477: cat stderr
 input:
-  | (1 + # + 1) = 1111
-./calc.at:1476:  $PREPARSER ./calc  input
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+./calc.at:1482: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171528,16 +171761,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: cat stderr
+./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1477: cat stderr
 input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1476:  $PREPARSER ./calc  input
+  | (!!) + (1 2) = 1
+./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
-error: null divisor
-./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-error: null divisor
-./calc.at:1476: "$PERL" -pi -e 'use strict;
+1.11: syntax error
+1.1-16: error: 2222 != 1
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171547,17 +171783,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1476: cat stderr
-547. calc.at:1476:  ok
-
-557. calc.at:1482: testing Calculator glr2.cc %debug  ...
-./calc.at:1482: mv calc.y.tmp calc.y
-
-./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1477: cat stderr
 stderr:
 stdout:
-./calc.at:1480: "$PERL" -ne '
+input:
+./calc.at:1478: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -171568,7 +171798,15 @@
         || /\t/
         )' calc.cc
 
+  | (- *) + (1 2) = 1
+./calc.at:1477:  $PREPARSER ./calc  input
+stderr:
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -171582,19 +171820,14 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-input:
-  | 1 2
-./calc.at:1480:  $PREPARSER ./calc  input
+./calc.at:1478:  $PREPARSER ./calc  input
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
 stderr:
-syntax error, unexpected number
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected number
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171604,16 +171837,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
 input:
-  | 1//2
-./calc.at:1480:  $PREPARSER ./calc  input
+  | 1 2
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1477: cat stderr
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+1.3: syntax error
+input:
+  | (* *) + (*) + (*)
+./calc.at:1477:  $PREPARSER ./calc  input
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171623,16 +171859,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
-input:
-  | error
-./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected invalid token
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected invalid token
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1478: cat stderr
+input:
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171642,16 +171880,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1480:  $PREPARSER ./calc  input
+  | 1//2
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected '='
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.3: syntax error
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '='
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1477: cat stderr
+1.3: syntax error
+input:
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171661,17 +171899,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1477:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: cat stderr
+stderr:
 input:
-  | 
-  | +1
-./calc.at:1480:  $PREPARSER ./calc  input
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1477:  $PREPARSER ./calc  input
+  | error
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected '+'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '+'
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+1.1: syntax error
+stderr:
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+1.1: syntax error
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171681,14 +171929,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
-./calc.at:1480:  $PREPARSER ./calc  /dev/null
-stderr:
-syntax error, unexpected end of input
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected end of input
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171698,24 +171939,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
+./calc.at:1477: cat stderr
+./calc.at:1478: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1480:  $PREPARSER ./calc  input
+input:
+  | (#) + (#) = 2222
+./calc.at:1477:  $PREPARSER ./calc  input
+  | 1 = 2 = 3
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+1.7: syntax error
+stderr:
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+stderr:
+1.7: syntax error
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171725,20 +171969,22 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected number
-error: 2222 != 1
+./calc.at:1478: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1477: cat stderr
 stderr:
 stdout:
-./calc.at:1477: "$PERL" -ne '
+./calc.at:1478: cat stderr
+input:
+./calc.at:1480: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -171749,6 +171995,13 @@
         || /\t/
         )' calc.cc
 
+  | (1 + #) = 1111
+./calc.at:1477:  $PREPARSER ./calc  input
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -171763,41 +172016,23 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1477:  $PREPARSER ./calc  input
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1480: cat stderr
-stderr:
-input:
-input:
-  | (- *) + (1 2) = 1
 ./calc.at:1480:  $PREPARSER ./calc  input
-  | 1 2
-./calc.at:1477:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1478:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
 stderr:
 stderr:
-1.3: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
+2.1: syntax error
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-1.3: syntax error
+input:
+  | 1 2
+./calc.at:1480:  $PREPARSER ./calc  input
+2.1: syntax error
+stderr:
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171808,7 +172043,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+syntax error, unexpected number
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171818,29 +172056,18 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
 ./calc.at:1477: cat stderr
+syntax error, unexpected number
+./calc.at:1478: cat stderr
 input:
-  | (* *) + (*) + (*)
-./calc.at:1480:  $PREPARSER ./calc  input
-input:
-stderr:
-  | 1//2
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+  | (# + 1) = 1111
 ./calc.at:1477:  $PREPARSER ./calc  input
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.3: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478:  $PREPARSER ./calc  /dev/null
 stderr:
 stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-1.3: syntax error
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171850,7 +172077,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+1.1: syntax error
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171860,22 +172092,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
-input:
-./calc.at:1480: cat stderr
-  | error
-./calc.at:1477:  $PREPARSER ./calc  input
-stderr:
 1.1: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: cat stderr
 input:
-stderr:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1480:  $PREPARSER ./calc  input
-1.1: syntax error
-stderr:
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171885,20 +172105,61 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-input:
-  | 1 + 2 * 3 + !- ++
+  | 1//2
 ./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
 ./calc.at:1477: cat stderr
+stderr:
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stdout:
+./calc.at:1479: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
+input:
+stderr:
+  | (1 + # + 1) = 1111
 input:
-  | 1 = 2 = 3
 ./calc.at:1477:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1479:  $PREPARSER ./calc  input
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 stderr:
-1.7: syntax error
+./calc.at:1478: cat stderr
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+input:
+1.6: syntax error: invalid character: '#'
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 2
+./calc.at:1479:  $PREPARSER ./calc  input
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
+./calc.at:1478:  $PREPARSER ./calc  input
+1.6: syntax error: invalid character: '#'
+stderr:
 stderr:
 ./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
@@ -171910,39 +172171,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-1.7: syntax error
+syntax error, unexpected number
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1480: cat stderr
-./calc.at:1477: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 input:
-  | (#) + (#) = 2222
+  | error
 ./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1477: cat stderr
-input:
-stderr:
-  | 
-  | +1
-./calc.at:1477:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-2.1: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected number
 stderr:
-2.1: syntax error
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+syntax error, unexpected invalid token
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171952,6 +172197,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479: cat stderr
+stderr:
+stderr:
+1.2: syntax error
+1.18: syntax error
+1.23: syntax error
+1.41: syntax error
+1.1-46: error: 4444 != 1
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171962,23 +172216,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
-./calc.at:1477: cat stderr
+syntax error, unexpected invalid token
 input:
-./calc.at:1477:  $PREPARSER ./calc  /dev/null
-  | (1 + #) = 1111
-./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
-stderr:
-1.1: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.1: syntax error
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+  | 1//2
+./calc.at:1479:  $PREPARSER ./calc  input
+./calc.at:1477: cat stderr
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -171988,6 +172231,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -171998,43 +172244,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
-./calc.at:1480: cat stderr
-input:
+stderr:
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 input:
-  | (# + 1) = 1111
-./calc.at:1480:  $PREPARSER ./calc  input
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+  | (1 + 1) / (1 - 1)
 ./calc.at:1477:  $PREPARSER ./calc  input
+./calc.at:1478: cat stderr
 stderr:
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
+1.11-17: error: null divisor
 ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-syntax error: invalid character: '#'
-./calc.at:1477: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172044,26 +172263,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
 ./calc.at:1480: cat stderr
 input:
+stderr:
+1.11-17: error: null divisor
+./calc.at:1479: cat stderr
   | (!!) + (1 2) = 1
-./calc.at:1477:  $PREPARSER ./calc  input
-input:
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
 1.11: syntax error
 1.1-16: error: 2222 != 1
-  | (1 + # + 1) = 1111
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 = 2 = 3
 ./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
-stderr:
-syntax error: invalid character: '#'
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
+input:
+  | error
+./calc.at:1479:  $PREPARSER ./calc  input
 ./calc.at:1477: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -172074,39 +172290,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1477: cat stderr
-./calc.at:1480: cat stderr
-input:
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1477:  $PREPARSER ./calc  input
-  | (1 + 1) / (1 - 1)
-./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-error: null divisor
 stderr:
+stderr:
+syntax error, unexpected '='
 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-error: null divisor
+1.11: syntax error
+1.1-16: error: 2222 != 1
+syntax error, unexpected invalid token
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected '='
 stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+syntax error, unexpected invalid token
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172116,6 +172313,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1477: cat stderr
+549. calc.at:1477:  ok
 ./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -172126,23 +172325,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1478: cat stderr
 ./calc.at:1480: cat stderr
-./calc.at:1477: cat stderr
-554. calc.at:1480:  ok
-input:
-
-  | (* *) + (*) + (*)
-./calc.at:1477:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172152,24 +172337,36 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1477:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1478:  $PREPARSER ./calc  input
+stderr:
+
+input:
+./calc.at:1479: cat stderr
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 
+  | +1
+./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1477:  $PREPARSER ./calc  input
+1.4: syntax error
+1.12: syntax error
+1.1-17: error: 2222 != 1
+syntax error, unexpected '+'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose  ...
-./calc.at:1485: mv calc.y.tmp calc.y
-
-./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+syntax error, unexpected '='
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected '+'
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172179,18 +172376,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1477:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+syntax error, unexpected '='
+./calc.at:1478: cat stderr
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172200,17 +172389,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
 input:
-./calc.at:1485: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-  | (1 + #) = 1111
-./calc.at:1477:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+./calc.at:1480: cat stderr
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172220,16 +172404,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1477:  $PREPARSER ./calc  input
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480:  $PREPARSER ./calc  /dev/null
+558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose  ...
+./calc.at:1485: mv calc.y.tmp calc.y
+
 stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.2: syntax error: invalid character: '#'
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+1.2: syntax error
+1.10: syntax error
+1.16: syntax error
+./calc.at:1479: cat stderr
+./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+syntax error, unexpected end of input
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+  | 
+  | +1
+./calc.at:1479:  $PREPARSER ./calc  input
+syntax error, unexpected end of input
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172239,16 +172437,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1477:  $PREPARSER ./calc  input
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected '+'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+./calc.at:1478: cat stderr
+syntax error, unexpected '+'
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172258,16 +172453,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
 input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1477:  $PREPARSER ./calc  input
-stderr:
-1.11-17: error: null divisor
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-1.11-17: error: null divisor
-./calc.at:1477: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172277,72 +172468,39 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1477: cat stderr
-549. calc.at:1477:  ok
-stderr:
-stdout:
-./calc.at:1479: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
-
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479: cat stderr
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
 input:
-  | 1 2
-./calc.at:1479:  $PREPARSER ./calc  input
+./calc.at:1479:  $PREPARSER ./calc  /dev/null
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected number
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected number
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1479: cat stderr
-input:
-  | 1//2
-./calc.at:1479:  $PREPARSER ./calc  input
+syntax error, unexpected end of input
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose  ...
-./calc.at:1485: mv calc.y.tmp calc.y
-
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+./calc.at:1485: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
-./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+stderr:
+syntax error, unexpected end of input
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172352,17 +172510,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-input:
-  | error
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected invalid token
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected invalid token
-./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+./calc.at:1480: cat stderr
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172372,15 +172521,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
 input:
-  | 1 = 2 = 3
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '='
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected '='
 ./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -172391,17 +172532,35 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | (!!) + (1 2) = 1
+./calc.at:1480:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1479: cat stderr
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1478: cat stderr
+stderr:
 input:
-  | 
-  | +1
+input:
+syntax error, unexpected number
+error: 2222 != 1
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1479:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected '+'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '+'
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172411,14 +172570,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-./calc.at:1479:  $PREPARSER ./calc  /dev/null
 stderr:
-syntax error, unexpected end of input
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected end of input
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+./calc.at:1480: cat stderr
+1.2: syntax error: invalid character: '#'
+1.8: syntax error: invalid character: '#'
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172428,23 +172590,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
+  | (- *) + (1 2) = 1
+./calc.at:1480:  $PREPARSER ./calc  input
 ./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -172455,18 +172603,34 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 ./calc.at:1479: cat stderr
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 input:
+./calc.at:1478: cat stderr
   | (!!) + (1 2) = 1
 ./calc.at:1479:  $PREPARSER ./calc  input
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
 stderr:
 syntax error, unexpected number
 error: 2222 != 1
 ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | (1 + #) = 1111
+./calc.at:1478:  $PREPARSER ./calc  input
 syntax error, unexpected number
 error: 2222 != 1
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+stderr:
+1.6: syntax error: invalid character: '#'
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172476,19 +172640,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
+1.6: syntax error: invalid character: '#'
+./calc.at:1480: cat stderr
 ./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -172499,20 +172653,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172524,18 +172665,37 @@
 ' expout || exit 77
 ./calc.at:1479: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
 input:
-  | 1 + 2 * 3 + !- ++
+./calc.at:1478: cat stderr
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (- *) + (1 2) = 1
 ./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
+stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
 ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+input:
 stderr:
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+  | (# + 1) = 1111
+./calc.at:1478:  $PREPARSER ./calc  input
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
+stderr:
+1.2: syntax error: invalid character: '#'
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172545,17 +172705,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
+1.2: syntax error: invalid character: '#'
+./calc.at:1480: cat stderr
 ./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -172566,16 +172718,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172585,15 +172728,40 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
 ./calc.at:1479: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1480:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1478: cat stderr
 input:
-  | (# + 1) = 1111
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
 ./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
+stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
+  | 1 + 2 * 3 + !- ++
+./calc.at:1480:  $PREPARSER ./calc  input
+stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+1.6: syntax error: invalid character: '#'
+./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+1.6: syntax error: invalid character: '#'
 ./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -172604,16 +172772,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172624,15 +172783,7 @@
   }eg
 ' expout || exit 77
 ./calc.at:1479: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-error: null divisor
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-error: null divisor
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172642,72 +172793,37 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-552. calc.at:1479:  ok
-
-560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose  ...
-./calc.at:1486: mv calc.y.tmp calc.y
-
-./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1486: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1478: cat stderr
+input:
+./calc.at:1480: cat stderr
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
-stdout:
-./calc.at:1478: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + 1) / (1 - 1)
 ./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
+input:
+1.11-17: error: null divisor
 ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
-  | 1 2
-./calc.at:1478:  $PREPARSER ./calc  input
+  | (#) + (#) = 2222
+./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error
-./calc.at:1478: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1478: cat stderr
 input:
-  | 1//2
-./calc.at:1478:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+1.11-17: error: null divisor
+  | 1 + 2 * 3 + !- ++
+./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
-1.3: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.3: syntax error
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1478: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -172718,16 +172834,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
-input:
-  | error
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
-1.1: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.1: syntax error
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172738,15 +172846,9 @@
   }eg
 ' expout || exit 77
 ./calc.at:1478: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
-1.7: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.7: syntax error
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1480: cat stderr
+551. calc.at:1478:  ok
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172756,17 +172858,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
 input:
-  | 
-  | +1
-./calc.at:1478:  $PREPARSER ./calc  input
+./calc.at:1479: cat stderr
+  | (1 + #) = 1111
+./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-2.1: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (#) + (#) = 2222
+./calc.at:1479:  $PREPARSER ./calc  input
+
 stderr:
-2.1: syntax error
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172776,14 +172888,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
-./calc.at:1478:  $PREPARSER ./calc  /dev/null
-stderr:
-1.1: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.1: syntax error
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1480: cat stderr
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172793,24 +172899,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
+./calc.at:1479: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1478:  $PREPARSER ./calc  input
+  | (# + 1) = 1111
+./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+input:
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + #) = 1111
+./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error
-1.18: syntax error
-1.23: syntax error
-1.41: syntax error
-1.1-46: error: 4444 != 1
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose  ...
+stderr:
+./calc.at:1485: mv calc.y.tmp calc.y
+
+syntax error: invalid character: '#'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172820,18 +172930,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11: syntax error
-1.1-16: error: 2222 != 1
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172841,20 +172940,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
+./calc.at:1480: cat stderr
+./calc.at:1479: cat stderr
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1478:  $PREPARSER ./calc  input
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1480:  $PREPARSER ./calc  input
+  | (# + 1) = 1111
+./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-1.4: syntax error
-1.12: syntax error
-1.1-17: error: 2222 != 1
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172864,20 +172969,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error
-1.10: syntax error
-1.16: syntax error
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172887,32 +172979,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
+./calc.at:1480: cat stderr
+./calc.at:1479: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1480:  $PREPARSER ./calc  input
 input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1478:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
 stderr:
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479:  $PREPARSER ./calc  input
+error: null divisor
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error: invalid character: '#'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1478: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 stderr:
 stdout:
-./calc.at:1478: cat stderr
+error: null divisor
 ./calc.at:1482: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -172924,13 +173008,8 @@
         || /\t/
         )' calc.cc
 
-input:
-  | (#) + (#) = 2222
-./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -172946,11 +173025,18 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1482:  $PREPARSER ./calc  input
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
-1.2: syntax error: invalid character: '#'
-1.8: syntax error: invalid character: '#'
-stderr:
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -172960,6 +173046,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1480: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -173797,7 +173884,9 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1479: cat stderr
+554. calc.at:1480: stderr:
+ ok
 Starting parse
 Entering state 0
 Reading a token
@@ -174635,11 +174724,15 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 input:
-./calc.at:1478: cat stderr
+input:
   | 1 2
 ./calc.at:1482:  $PREPARSER ./calc  input
-input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
+stderr:
+error: null divisor
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -174656,9 +174749,10 @@
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token "number" (2)
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | (1 + #) = 1111
-./calc.at:1478:  $PREPARSER ./calc  input
 stderr:
+stderr:
+
+error: null divisor
 Starting parse
 Entering state 0
 Reading a token
@@ -174674,12 +174768,7 @@
 syntax error
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token "number" (2)
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1482: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -174689,7 +174778,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: "$PERL" -pi -e 'use strict;
+./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -174699,10 +174788,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1479: cat stderr
+552. calc.at:1479:  ok
 ./calc.at:1482: cat stderr
+560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose  ...
+./calc.at:1486: mv calc.y.tmp calc.y
+
+./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 input:
   | 1//2
 ./calc.at:1482:  $PREPARSER ./calc  input
+
 stderr:
 Starting parse
 Entering state 0
@@ -174725,7 +174821,6 @@
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '/' ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1478: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -174747,11 +174842,6 @@
 Error: popping token '/' ()
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '/' ()
-input:
-  | (# + 1) = 1111
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
-1.2: syntax error: invalid character: '#'
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -174762,24 +174852,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.2: syntax error: invalid character: '#'
 ./calc.at:1482: cat stderr
-./calc.at:1478: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 input:
   | error
 ./calc.at:1482:  $PREPARSER ./calc  input
+561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose  ...
+./calc.at:1486: mv calc.y.tmp calc.y
+
 stderr:
+./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 Starting parse
 Entering state 0
 Reading a token
@@ -174787,7 +174868,7 @@
 syntax error
 Cleanup: discarding lookahead token "invalid token" ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1478: cat stderr
+./calc.at:1486: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
 Starting parse
 Entering state 0
@@ -174795,12 +174876,6 @@
 Next token is token "invalid token" ()
 syntax error
 Cleanup: discarding lookahead token "invalid token" ()
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -174811,23 +174886,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-1.6: syntax error: invalid character: '#'
-./calc.at:1478: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1482: cat stderr
+./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 input:
   | 1 = 2 = 3
 ./calc.at:1482:  $PREPARSER ./calc  input
-./calc.at:1478: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -174859,10 +174922,6 @@
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '=' ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1478:  $PREPARSER ./calc  input
-stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -174893,20 +174952,6 @@
 Error: popping token '=' ()
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '=' ()
-1.11-17: error: null divisor
-./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-1.11-17: error: null divisor
-./calc.at:1478: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -174917,9 +174962,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1478: cat stderr
 ./calc.at:1482: cat stderr
-551. calc.at:1478:  ok
 input:
   | 
   | +1
@@ -174965,7 +175008,6 @@
 syntax error
 Error: popping nterm input ()
 Cleanup: discarding lookahead token '+' ()
-
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -175003,10 +175045,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose  ...
-./calc.at:1486: mv calc.y.tmp calc.y
-
-./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 ./calc.at:1482: cat stderr
 input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
@@ -175635,7 +175673,6 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
 Starting parse
 Entering state 0
@@ -175758,10 +175795,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
 ./calc.at:1482: cat stderr
+stdout:
+./calc.at:1485: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1482:  $PREPARSER ./calc  input
+input:
 stderr:
 Starting parse
 Entering state 0
@@ -175880,6 +175931,21 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1485:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -175997,1550 +176063,6 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (3333)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (3333)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2222)
-Shifting token "number" (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2222)
-Shifting token "number" (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-556. calc.at:1482:  ok
-
-562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose  ...
-./calc.at:1487: mv calc.y.tmp calc.y
-
-./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-stderr:
-stdout:
-./calc.at:1485: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -178378,6 +176900,16 @@
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -179219,6 +177751,7 @@
   | 1 2
 ./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1482: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -179251,6 +177784,242 @@
 syntax error, unexpected number
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token number (2)
+input:
+  | (* *) + (*) + (*)
+./calc.at:1482:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (3333)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (3333)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -179287,6 +178056,18 @@
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '/' ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1482: cat stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -179308,6 +178089,22 @@
 Error: popping token '/' ()
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '/' ()
+stdout:
+./calc.at:1479: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1482:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -179318,101 +178115,319 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1485: cat stderr
-input:
-  | error
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485: cat stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1485: cat stderr
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1479:  $PREPARSER ./calc  input
 input:
-  | 1 = 2 = 3
+input:
+stderr:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1482:  $PREPARSER ./calc  input
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
 ./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
-Entering state 27
+Entering state 29
 Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
-Entering state 27
+Entering state 29
 Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+input:
+  | 1 2
+./calc.at:1479:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
+syntax error, unexpected number
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -179422,52 +178437,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1485: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
+syntax error, unexpected number
+./calc.at:1482: cat stderr
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -179478,24 +178449,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1485: cat stderr
-./calc.at:1485:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
+stdout:
+input:
+./calc.at:1480: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
+  | (#) + (#) = 2222
+./calc.at:1482:  $PREPARSER ./calc  input
+./calc.at:1485: cat stderr
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -179505,11 +178476,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1485: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1479: cat stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -179517,14 +178486,19 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -179539,119 +178513,101 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (3)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
 Entering state 29
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
 -> $$ = nterm exp (2222)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (2222)
+Shifting token "number" (2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2222)
+-> $$ = nterm exp (2222)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1485:  $PREPARSER ./calc  input
+input:
+stderr:
+input:
+Starting parse
+Entering state 0
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
 Entering state 8
+Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
@@ -179660,52 +178616,19 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 30
-Reading a token
-Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (1)
-   $2 = token '*' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' ()
-Error: popping nterm exp (2)
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -179713,88 +178636,968 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (3333)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (2222)
+Shifting token "number" (2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2222)
+-> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (4444)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (4444)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 12
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+  | 1//2
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479:  $PREPARSER ./calc  input
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+stderr:
+./calc.at:1480:  $PREPARSER ./calc  input
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+stderr:
+stderr:
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 2
+./calc.at:1480:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error, unexpected number
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485: cat stderr
+./calc.at:1482: cat stderr
+stderr:
+input:
+syntax error, unexpected number
+  | (1 + #) = 1111
+input:
+./calc.at:1479: cat stderr
+./calc.at:1482:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1485:  $PREPARSER ./calc  input
+stderr:
+input:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | error
+./calc.at:1479:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error, unexpected invalid token
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+stderr:
+syntax error, unexpected invalid token
+./calc.at:1480: cat stderr
+input:
+  | 1//2
+./calc.at:1480:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1485: cat stderr
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1485:  $PREPARSER ./calc  /dev/null
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+./calc.at:1479: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file ()
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1482: cat stderr
+input:
+  | 1 = 2 = 3
+./calc.at:1479:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error, unexpected '='
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+syntax error, unexpected '='
+input:
+  | (# + 1) = 1111
+./calc.at:1482:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1480: cat stderr
+./calc.at:1485: cat stderr
+./calc.at:1479: cat stderr
+input:
+  | error
+./calc.at:1480:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error, unexpected invalid token
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+input:
+syntax error, unexpected invalid token
+  | 
+  | +1
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1479:  $PREPARSER ./calc  input
+./calc.at:1485:  $PREPARSER ./calc  input
+stderr:
+syntax error, unexpected '+'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1482: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (3)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 30
+Reading a token
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 105):
+   $1 = nterm exp (1)
+   $2 = token '*' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (3333)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (4444)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (4444)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+syntax error, unexpected '+'
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
@@ -180006,6 +179809,100 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+input:
+./calc.at:1480: cat stderr
+  | (1 + # + 1) = 1111
+./calc.at:1482:  $PREPARSER ./calc  input
+input:
+stderr:
+  | 1 = 2 = 3
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1480:  $PREPARSER ./calc  input
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -180016,11 +179913,138 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error, unexpected '='
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1485: cat stderr
+stderr:
+syntax error, unexpected '='
+./calc.at:1479: cat stderr
+./calc.at:1479:  $PREPARSER ./calc  /dev/null
+stderr:
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error, unexpected end of input
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1482: cat stderr
+syntax error, unexpected end of input
   | (!!) + (1 2) = 1
 ./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1480: cat stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -180133,6 +180157,22 @@
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 
+  | +1
+  | (1 + 1) / (1 - 1)
+./calc.at:1480:  $PREPARSER ./calc  input
+./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -180245,6 +180285,246 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+stderr:
+stderr:
+syntax error, unexpected '+'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479: cat stderr
+stderr:
+stderr:
+syntax error, unexpected '+'
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+input:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -180255,11 +180535,41 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1479:  $PREPARSER ./calc  input
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1485: cat stderr
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1482: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -180378,6 +180688,25 @@
 Cleanup: popping nterm input ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+556. calc.at:1482:  ok
+./calc.at:1480: cat stderr
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1480:  $PREPARSER ./calc  /dev/null
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -180494,6 +180823,15 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+./calc.at:1479: cat stderr
+
+stderr:
+syntax error, unexpected end of input
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1479:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -180504,10 +180842,28 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1485: cat stderr
+syntax error, unexpected end of input
+stderr:
+syntax error, unexpected number
+error: 2222 != 1
 input:
   | (* *) + (*) + (*)
 ./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -180625,6 +180981,7 @@
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -180741,6 +181098,35 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1479: cat stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1480:  $PREPARSER ./calc  input
+input:
+562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose  ...
+stderr:
+./calc.at:1487: mv calc.y.tmp calc.y
+
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (- *) + (1 2) = 1
+./calc.at:1479:  $PREPARSER ./calc  input
+./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+stderr:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -180751,10 +181137,44 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: 4444 != 1
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1485: cat stderr
+stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
 input:
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -180822,7 +181242,13 @@
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: cat stderr
+./calc.at:1479: cat stderr
+input:
+input:
 stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1480:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -180888,10 +181314,24 @@
    $2 = token '+' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+  | (* *) + (*) + (*)
+./calc.at:1479:  $PREPARSER ./calc  input
 input:
   | 1 + 2 * 3 + !- ++
+stderr:
 ./calc.at:1485:  $PREPARSER ./calc  input
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error, unexpected number
+error: 2222 != 1
 Starting parse
 Entering state 0
 Reading a token
@@ -180959,6 +181399,10 @@
 Cleanup: popping nterm exp (7)
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -181024,6 +181468,27 @@
    $2 = token '-' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -181034,10 +181499,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1479: cat stderr
+./calc.at:1480: cat stderr
+input:
+input:
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1485: cat stderr
+./calc.at:1479:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1480:  $PREPARSER ./calc  input
 input:
+stderr:
   | (#) + (#) = 2222
 ./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -181137,6 +181618,8 @@
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -181235,8 +181718,14 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected number
+error: 2222 != 1
+  | 1 + 2 * 3 + !- ++
+./calc.at:1479:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stdout:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -181247,38 +181736,41 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1480: cat stderr
 ./calc.at:1485: cat stderr
 input:
+./calc.at:1479: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (* *) + (*) + (*)
+./calc.at:1480:  $PREPARSER ./calc  input
+input:
+./calc.at:1479: cat stderr
   | (1 + #) = 1111
 ./calc.at:1485:  $PREPARSER ./calc  input
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
 stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -181357,10 +181849,25 @@
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 stderr:
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 input:
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (#) + (#) = 2222
+./calc.at:1479:  $PREPARSER ./calc  input
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -181438,12 +181945,12 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-  | 1 2
-./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected number
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -181454,10 +181961,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error, unexpected number
-./calc.at:1485: cat stderr
-input:
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -181467,10 +181971,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1480: cat stderr
+./calc.at:1485: cat stderr
+input:
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1480:  $PREPARSER ./calc  input
   | (# + 1) = 1111
 ./calc.at:1485:  $PREPARSER ./calc  input
-./calc.at:1480: cat stderr
+./calc.at:1479: cat stderr
+stderr:
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -181540,7 +182054,11 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+  | (1 + #) = 1111
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1479:  $PREPARSER ./calc  input
+stderr:
+input:
 stderr:
 Starting parse
 Entering state 0
@@ -181611,12 +182129,13 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-input:
-  | 1//2
+syntax error: invalid character: '#'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 + !- ++
 ./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -181629,7 +182148,6 @@
 ' expout || exit 77
 stderr:
 ./calc.at:1485: cat stderr
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
 input:
   | (1 + # + 1) = 1111
 ./calc.at:1485:  $PREPARSER ./calc  input
@@ -181727,9 +182245,10 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1480: cat stderr
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
 stderr:
+./calc.at:1480: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -181814,12 +182333,12 @@
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
 input:
-  | error
+  | (#) + (#) = 2222
 ./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected invalid token
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -181830,9 +182349,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-syntax error, unexpected invalid token
-./calc.at:1485: cat stderr
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -181842,10 +182359,27 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
+./calc.at:1479: cat stderr
+./calc.at:1485: cat stderr
 input:
   | (1 + 1) / (1 - 1)
 ./calc.at:1485:  $PREPARSER ./calc  input
-./calc.at:1480: cat stderr
+./calc.at:1480: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (# + 1) = 1111
+./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -181963,10 +182497,10 @@
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
 ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
+syntax error: invalid character: '#'
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1480: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -182083,20 +182617,12 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-syntax error, unexpected '='
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '='
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+input:
+syntax error: invalid character: '#'
+  | (1 + #) = 1111
+./calc.at:1480:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -182107,21 +182633,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1480: cat stderr
 ./calc.at:1485: cat stderr
-558. calc.at:1485:  ok
-input:
-  | 
-  | +1
-./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '+'
+syntax error: invalid character: '#'
 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+558. calc.at:1485:  ok
 stderr:
-syntax error, unexpected '+'
-
-stderr:
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -182131,46 +182648,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stdout:
-./calc.at:1479: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
-./calc.at:1480: cat stderr
-./calc.at:1480:  $PREPARSER ./calc  /dev/null
-input:
-stderr:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1479:  $PREPARSER ./calc  input
-syntax error, unexpected end of input
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error, unexpected end of input
-stderr:
-input:
-  | 1 2
-./calc.at:1479:  $PREPARSER ./calc  input
+syntax error: invalid character: '#'
 ./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -182181,69 +182659,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-syntax error, unexpected number
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
 ./calc.at:1480: cat stderr
-stderr:
-syntax error, unexpected number
-563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose  ...
+./calc.at:1479: cat stderr
 input:
-./calc.at:1489: mv calc.y.tmp calc.y
-
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+  | (# + 1) = 1111
 ./calc.at:1480:  $PREPARSER ./calc  input
-./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-stderr:
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1479: cat stderr
-stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
 input:
-  | 1//2
+  | (1 + # + 1) = 1111
 ./calc.at:1479:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+stderr:
+syntax error: invalid character: '#'
 ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-./calc.at:1480: cat stderr
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1480:  $PREPARSER ./calc  input
 stderr:
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error: invalid character: '#'
+syntax error: invalid character: '#'
 ./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -182254,13 +182688,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./calc.at:1479: cat stderr
-syntax error, unexpected number
-error: 2222 != 1
-input:
-  | error
-./calc.at:1479:  $PREPARSER ./calc  input
 ./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -182271,25 +182698,29 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1479: cat stderr
 ./calc.at:1480: cat stderr
-syntax error, unexpected invalid token
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-stderr:
-syntax error, unexpected invalid token
-  | (- *) + (1 2) = 1
+  | (1 + # + 1) = 1111
 ./calc.at:1480:  $PREPARSER ./calc  input
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1479:  $PREPARSER ./calc  input
+563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose  ...
+./calc.at:1489: mv calc.y.tmp calc.y
+
+./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+stderr:
+syntax error: invalid character: '#'
 stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: null divisor
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+syntax error: invalid character: '#'
+stderr:
+error: null divisor
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -182299,8 +182730,7 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1479: cat stderr
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1479: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -182310,31 +182740,19 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-  | 1 = 2 = 3
-./calc.at:1479:  $PREPARSER ./calc  input
+./calc.at:1479: cat stderr
 ./calc.at:1480: cat stderr
-stderr:
-syntax error, unexpected '='
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 input:
-  | (* *) + (*) + (*)
+  | (1 + 1) / (1 - 1)
+553. calc.at:1479:  ok
 ./calc.at:1480:  $PREPARSER ./calc  input
-syntax error, unexpected '='
-./calc.at:1489: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
 stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+error: null divisor
 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-stdout:
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+error: null divisor
+
+./calc.at:1480: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -182344,7 +182762,23 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1486: "$PERL" -ne '
+./calc.at:1480: cat stderr
+./calc.at:1489: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+555. calc.at:1480:  ok
+
+564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose  ...
+./calc.at:1489: mv calc.y.tmp calc.y
+
+./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1491: mv calc.y.tmp calc.y
+
+./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
+./calc.at:1491: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+stderr:
+stdout:
+./calc.at:1482: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
     if (# No starting/ending empty lines.
@@ -182355,18 +182789,6 @@
         || /\t/
         )' calc.cc
 
-./calc.at:1479: cat stderr
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -182381,14 +182803,8 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1486:  $PREPARSER ./calc  input
-  | 
-  | +1
-./calc.at:1479:  $PREPARSER ./calc  input
-./calc.at:1480: cat stderr
-stderr:
+./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
-input:
 Starting parse
 Entering state 0
 Reading a token
@@ -183080,8 +183496,7 @@
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
-Next token  | 1 + 2 * 3 + !+ ++
- is token '=' ()
+Next token is token '=' ()
 Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (2)
    $2 = token '^' ()
@@ -183226,15 +183641,7 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1480:  $PREPARSER ./calc  input
-syntax error, unexpected '+'
-stderr:
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error, unexpected '+'
-stderr:
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -184073,12 +184480,9 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 input:
-input:
   | 1 2
-./calc.at:1486:  $PREPARSER ./calc  input
-  | 1 + 2 * 3 + !- ++
+./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1480:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -184091,21 +184495,10 @@
 Entering state 8
 Reading a token
 Next token is token "number" (2)
-syntax error, unexpected number
+syntax error
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token "number" (2)
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -184119,27 +184512,10 @@
 Entering state 8
 Reading a token
 Next token is token "number" (2)
-syntax error, unexpected number
+syntax error
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token "number" (2)
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1479: cat stderr
-stderr:
-./calc.at:1479:  $PREPARSER ./calc  /dev/null
-stderr:
-syntax error, unexpected end of input
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1480: "$PERL" -pi -e 'use strict;
+./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -184149,31 +184525,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./calc.at:1486: cat stderr
-syntax error, unexpected end of input
-./calc.at:1480: cat stderr
-input:
+./calc.at:1482: cat stderr
 input:
-  | (#) + (#) = 2222
-./calc.at:1480:  $PREPARSER ./calc  input
   | 1//2
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -184190,16 +184546,12 @@
 Entering state 22
 Reading a token
 Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+syntax error
 Error: popping token '/' ()
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '/' ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1479: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -184216,32 +184568,11 @@
 Entering state 22
 Reading a token
 Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+syntax error
 Error: popping token '/' ()
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '/' ()
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1479:  $PREPARSER ./calc  input
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1480: cat stderr
-stderr:
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1486: "$PERL" -pi -e 'use strict;
+./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -184251,86 +184582,26 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-input:
-./calc.at:1486: cat stderr
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-error: 4444 != 1
-  | (1 + #) = 1111
-./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: cat stderr
 input:
-stderr:
   | error
-./calc.at:1486:  $PREPARSER ./calc  input
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
-syntax error: invalid character: '#'
-./calc.at:1479: cat stderr
 Starting parse
 Entering state 0
 Reading a token
 Next token is token "invalid token" ()
-syntax error, unexpected invalid token
+syntax error
 Cleanup: discarding lookahead token "invalid token" ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-input:
 Starting parse
 Entering state 0
 Reading a token
 Next token is token "invalid token" ()
-syntax error, unexpected invalid token
+syntax error
 Cleanup: discarding lookahead token "invalid token" ()
-  | (!!) + (1 2) = 1
-./calc.at:1479:  $PREPARSER ./calc  input
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1480: cat stderr
-./calc.at:1486: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1480:  $PREPARSER ./calc  input
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -184340,14 +184611,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
+./calc.at:1482: cat stderr
 input:
-syntax error: invalid character: '#'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 1 = 2 = 3
-./calc.at:1479: cat stderr
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
+./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -184373,13 +184640,12 @@
 Entering state 27
 Reading a token
 Next token is token '=' ()
-syntax error, unexpected '='
+syntax error
 Error: popping nterm exp (2)
 Error: popping token '=' ()
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '=' ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error: invalid character: '#'
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -184405,50 +184671,12 @@
 Entering state 27
 Reading a token
 Next token is token '=' ()
-syntax error, unexpected '='
+syntax error
 Error: popping nterm exp (2)
 Error: popping token '=' ()
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token '=' ()
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1480: cat stderr
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected number
-error: 2222 != 1
-./calc.at:1486: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1480:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1479: "$PERL" -pi -e 'use strict;
+./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -184458,16 +184686,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1482: cat stderr
 input:
-syntax error: invalid character: '#'
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | 
   | +1
-./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1479: cat stderr
+./calc.at:1482:  $PREPARSER ./calc  input
+stdout:
+./calc.at:1486: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 stderr:
-syntax error: invalid character: '#'
 Starting parse
 Entering state 0
 Reading a token
@@ -184484,11 +184721,26 @@
 Entering state 6
 Reading a token
 Next token is token '+' ()
-syntax error, unexpected '+'
+syntax error
 Error: popping nterm input ()
 Cleanup: discarding lookahead token '+' ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1486:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -184505,351 +184757,83 @@
 Entering state 6
 Reading a token
 Next token is token '+' ()
-syntax error, unexpected '+'
+syntax error
 Error: popping nterm input ()
 Cleanup: discarding lookahead token '+' ()
-input:
-  | (* *) + (*) + (*)
-./calc.at:1479:  $PREPARSER ./calc  input
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1480: cat stderr
-./calc.at:1486: cat stderr
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-./calc.at:1486:  $PREPARSER ./calc  /dev/null
-input:
-stderr:
-  | (1 + 1) / (1 - 1)
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" ()
-./calc.at:1480:  $PREPARSER ./calc  input
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1479: cat stderr
-stderr:
-error: null divisor
-./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" ()
-input:
-stderr:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1479:  $PREPARSER ./calc  input
-error: null divisor
-stderr:
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-input:
-./calc.at:1480: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1 + 2 * 3 + !- ++
-./calc.at:1479:  $PREPARSER ./calc  input
-./calc.at:1486: cat stderr
-stderr:
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1480: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
-555. calc.at:1480: stderr:
- ok
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 12
+Entering state 8
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (3)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
 Entering state 29
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
 Next token is token '*' ()
 Shifting token '*' ()
 Entering state 21
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
 Entering state 30
 Reading a token
-Next token is token '*' ()
+Next token is token '=' ()
 Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1)
+   $1 = nterm exp (2)
    $2 = token '*' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' ()
-Error: popping nterm exp (2)
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
 Entering state 29
-Reading a token
 Next token is token '=' ()
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (3333)
+   $1 = nterm exp (1)
    $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token "number" (7)
+Shifting token "number" (7)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (7)
+-> $$ = nterm exp (7)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (4444)
+   $1 = nterm exp (7)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
+   $3 = nterm exp (7)
+-> $$ = nterm exp (7)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (4444)
+   $1 = nterm exp (7)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -184858,162 +184842,121 @@
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 12
+Entering state 8
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
 Entering state 29
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Entering state 10
 Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (3)
+-> $$ = nterm exp (-3)
+Entering state 30
+Next token is token '=' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (-3)
+-> $$ = nterm exp (-6)
 Entering state 29
-Reading a token
-Next token is token '+' ()
+Next token is token '=' ()
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (1)
    $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+   $3 = nterm exp (-6)
+-> $$ = nterm exp (-5)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+Next token is token "number" (5)
+Shifting token "number" (5)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (5)
+-> $$ = nterm exp (5)
+Entering state 10
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (5)
+-> $$ = nterm exp (-5)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (-5)
+   $2 = token '=' ()
+   $3 = nterm exp (-5)
+-> $$ = nterm exp (-5)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (-5)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
@@ -185021,11 +184964,11 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 12
+Entering state 10
 Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
 Next token is token "number" (2)
 Shifting token "number" (2)
@@ -185033,169 +184976,71 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (2)
 -> $$ = nterm exp (2)
-Entering state 30
+Entering state 32
 Reading a token
-Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 92):
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (1)
-   $2 = token '*' ()
+   $2 = token '^' ()
    $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' ()
-Error: popping nterm exp (2)
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
+-> $$ = nterm exp (1)
+Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (3333)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 27
+Entering state 10
 Reading a token
 Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 27
+Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (4444)
+   $1 = nterm exp (-1)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (-1)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (4444)
+   $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-
-./calc.at:1479: cat stderr
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | (#) + (#) = 2222
-./calc.at:1486: cat stderr
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-syntax error: invalid character: '#'
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1486:  $PREPARSER ./calc  input
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-Starting parse
-Entering state 0
-Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' ()
-   $2 = token '!' ()
-Shifting token error ()
-Entering state 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
@@ -185203,34 +185048,42 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token "number" (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token "number" (2)
-Error: discarding token "number" (2)
+Entering state 10
 Reading a token
 Next token is token ')' ()
-Entering state 11
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (-1)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+-> $$ = nterm exp (-1)
+Entering state 8
+Reading a token
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (-1)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (1)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
@@ -185246,70 +185099,49 @@
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (1)
    $2 = token '=' ()
    $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+-> $$ = nterm exp (1)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1479: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' ()
-   $2 = token '!' ()
-Shifting token error ()
-Entering state 11
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
@@ -185317,148 +185149,81 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token "number" (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token "number" (2)
-Error: discarding token "number" (2)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (-1)
+-> $$ = nterm exp (1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 27
+Entering state 10
 Reading a token
 Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 27
+Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (-1)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (-1)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-input:
-  | (1 + #) = 1111
-./calc.at:1479:  $PREPARSER ./calc  input
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-syntax error: invalid character: '#'
-./calc.at:1486: cat stderr
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' ()
-   $2 = token error ()
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
@@ -185466,443 +185231,364 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 12
+Entering state 8
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
 Reading a token
 Next token is token "number" (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token "number" (2)
-Error: discarding token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 28
 Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+Next token is token '-' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (-1)
+Entering state 8
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (-1)
+   $2 = token '-' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (-4)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token "number" (4)
+Shifting token "number" (4)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 27
+   $1 = token "number" (4)
+-> $$ = nterm exp (4)
+Entering state 10
 Reading a token
 Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (4)
+-> $$ = nterm exp (-4)
+Entering state 27
+Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (-4)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $3 = nterm exp (-4)
+-> $$ = nterm exp (-4)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (-4)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1479: cat stderr
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose  ...
-./calc.at:1489: mv calc.y.tmp calc.y
-
-./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
 Next token is token '-' ()
 Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' ()
-   $2 = token error ()
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Entering state 19
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
 Entering state 12
 Reading a token
-Next token is token "number" (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token "number" (2)
-Error: discarding token "number" (2)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 28
 Reading a token
 Next token is token ')' ()
-Entering state 11
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (2)
+   $2 = token '-' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (-1)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (-1)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+-> $$ = nterm exp (-1)
+Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (2)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (2)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-input:
-  | (# + 1) = 1111
-./calc.at:1479:  $PREPARSER ./calc  input
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1486: cat stderr
-stderr:
-input:
-  | (* *) + (*) + (*)
-./calc.at:1486:  $PREPARSER ./calc  input
-syntax error: invalid character: '#'
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 32
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 32
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (8)
+Entering state 32
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (8)
+-> $$ = nterm exp (256)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+Next token is token "number" (256)
+Shifting token "number" (256)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (256)
+-> $$ = nterm exp (256)
+Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (256)
+   $2 = token '=' ()
+   $3 = nterm exp (256)
+-> $$ = nterm exp (256)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (3333)
+   $1 = nterm exp (256)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 12
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 32
 Reading a token
 Next token is token ')' ()
-Entering state 11
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (4)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (4)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
+-> $$ = nterm exp (4)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 32
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (4)
+   $2 = token '^' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (64)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+Next token is token "number" (64)
+Shifting token "number" (64)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (64)
+-> $$ = nterm exp (64)
+Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (64)
+   $2 = token '=' ()
+   $3 = nterm exp (64)
+-> $$ = nterm exp (64)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (3333)
+   $1 = nterm exp (64)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
@@ -185911,49 +185597,7 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1479: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1479:  $PREPARSER ./calc  input
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-syntax error: invalid character: '#'
-./calc.at:1486: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1486:  $PREPARSER ./calc  input
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -185990,41 +185634,51 @@
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
-Next token is token '+' ()
+Next token is token '=' ()
 Reducing stack 0 by rule 9 (line 92):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
-Next token is token '+' ()
+Next token is token '=' ()
 Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
 -> $$ = nterm exp (7)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
+Next token is token "number" (7)
+Shifting token "number" (7)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (7)
+-> $$ = nterm exp (7)
+Entering state 27
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1479: cat stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (7)
+   $2 = token '=' ()
+   $3 = nterm exp (7)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (7)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
@@ -186050,59 +185704,97 @@
 Shifting token '*' ()
 Entering state 21
 Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
 Next token is token "number" (3)
 Shifting token "number" (3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (3)
 -> $$ = nterm exp (3)
-Entering state 30
+Entering state 10
 Reading a token
-Next token is token '+' ()
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (3)
+-> $$ = nterm exp (-3)
+Entering state 30
+Next token is token '=' ()
 Reducing stack 0 by rule 9 (line 92):
    $1 = nterm exp (2)
    $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
+   $3 = nterm exp (-3)
+-> $$ = nterm exp (-6)
 Entering state 29
-Next token is token '+' ()
+Next token is token '=' ()
 Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1)
    $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
+   $3 = nterm exp (-6)
+-> $$ = nterm exp (-5)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1479:  $PREPARSER ./calc  input
-input:
-stderr:
-error: null divisor
-./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1 + 2 * 3 + !- ++
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-stderr:
-error: null divisor
-./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS
-Starting parse
-Entering state 0
+Next token is token "number" (5)
+Shifting token "number" (5)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (5)
+-> $$ = nterm exp (5)
+Entering state 10
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (5)
+-> $$ = nterm exp (-5)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (-5)
+   $2 = token '=' ()
+   $3 = nterm exp (-5)
+-> $$ = nterm exp (-5)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (-5)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
@@ -186110,11 +185802,11 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 8
+Entering state 10
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
 Next token is token "number" (2)
 Shifting token "number" (2)
@@ -186122,54 +185814,28 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (2)
 -> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 30
+Entering state 32
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
 Next token is token '-' ()
 Shifting token '-' ()
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Entering state 2
 Reading a token
 Next token is token "number" (1)
 Shifting token "number" (1)
@@ -186177,2177 +185843,7 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1479: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1479: cat stderr
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-553. calc.at:1479:  ok
-  | (#) + (#) = 2222
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2222)
-Shifting token "number" (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2222)
-Shifting token "number" (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1491: mv calc.y.tmp calc.y
-
-./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1491: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-560. calc.at:1486:  ok
-
-566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1491: mv calc.y.tmp calc.y
-
-./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-stderr:
-stdout:
-./calc.at:1482: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (7)
-Shifting token "number" (7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (7)
--> $$ = nterm exp (7)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (7)
-   $2 = token '=' ()
-   $3 = nterm exp (7)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (7)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 10
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (3)
--> $$ = nterm exp (-3)
-Entering state 30
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (-3)
--> $$ = nterm exp (-6)
-Entering state 29
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (-6)
--> $$ = nterm exp (-5)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (5)
-Shifting token "number" (5)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (5)
--> $$ = nterm exp (5)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (5)
--> $$ = nterm exp (-5)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (-5)
-   $2 = token '=' ()
-   $3 = nterm exp (-5)
--> $$ = nterm exp (-5)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (-5)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 32
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '^' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (-1)
-   $2 = token '=' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (-1)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (-1)
-   $3 = token ')' ()
--> $$ = nterm exp (-1)
-Entering state 8
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 32
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (-1)
-   $2 = token '^' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (1)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (1)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (-1)
--> $$ = nterm exp (1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (-1)
-   $2 = token '=' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (-1)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 28
-Reading a token
-Next token is token '-' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 28
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (-1)
-   $2 = token '-' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (-4)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (4)
-Shifting token "number" (4)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (4)
--> $$ = nterm exp (4)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (4)
--> $$ = nterm exp (-4)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (-4)
-   $2 = token '=' ()
-   $3 = nterm exp (-4)
--> $$ = nterm exp (-4)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (-4)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (2)
-   $2 = token '-' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (-1)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (-1)
-   $3 = token ')' ()
--> $$ = nterm exp (-1)
-Entering state 28
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2)
-   $2 = token '=' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 32
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 32
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (8)
-Entering state 32
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
-   $3 = nterm exp (8)
--> $$ = nterm exp (256)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (256)
-Shifting token "number" (256)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (256)
--> $$ = nterm exp (256)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (256)
-   $2 = token '=' ()
-   $3 = nterm exp (256)
--> $$ = nterm exp (256)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (256)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 12
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 32
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (4)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (4)
-   $3 = token ')' ()
--> $$ = nterm exp (4)
-Entering state 8
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 32
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (4)
-   $2 = token '^' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (64)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (64)
-Shifting token "number" (64)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (64)
--> $$ = nterm exp (64)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (64)
-   $2 = token '=' ()
-   $3 = nterm exp (64)
--> $$ = nterm exp (64)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (64)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (7)
-Shifting token "number" (7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (7)
--> $$ = nterm exp (7)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (7)
-   $2 = token '=' ()
-   $3 = nterm exp (7)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (7)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
--> $$ = nterm exp (3)
-Entering state 10
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (3)
--> $$ = nterm exp (-3)
-Entering state 30
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (-3)
--> $$ = nterm exp (-6)
-Entering state 29
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (-6)
--> $$ = nterm exp (-5)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (5)
-Shifting token "number" (5)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (5)
--> $$ = nterm exp (5)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (5)
--> $$ = nterm exp (-5)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (-5)
-   $2 = token '=' ()
-   $3 = nterm exp (-5)
--> $$ = nterm exp (-5)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (-5)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 32
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '^' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 10
+Entering state 10
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 11 (line 102):
@@ -188939,9 +186435,20 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | 1 2
-./calc.at:1482:  $PREPARSER ./calc  input
+./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1482: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -188955,10 +186462,12 @@
 Entering state 8
 Reading a token
 Next token is token "number" (2)
-syntax error
+syntax error, unexpected number
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token "number" (2)
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482:  $PREPARSER ./calc  /dev/null
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -188972,67 +186481,24 @@
 Entering state 8
 Reading a token
 Next token is token "number" (2)
-syntax error
+syntax error, unexpected number
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token "number" (2)
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | 1//2
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
+Now at end of input.
 syntax error
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
+Cleanup: discarding lookahead token "end of input" ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
+Now at end of input.
 syntax error
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
+Cleanup: discarding lookahead token "end of input" ()
+./calc.at:1486: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -189042,25 +186508,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | error
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" ()
-syntax error
-Cleanup: discarding lookahead token "invalid token" ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" ()
-syntax error
-Cleanup: discarding lookahead token "invalid token" ()
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -189071,10 +186518,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1482: cat stderr
+./calc.at:1486: cat stderr
 input:
-  | 1 = 2 = 3
-./calc.at:1482:  $PREPARSER ./calc  input
+  | 1//2
+./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1482: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -189087,25 +186535,19 @@
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 27
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
-Next token is token '=' ()
-syntax error
-Error: popping nterm exp (2)
-Error: popping token '=' ()
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
 Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Cleanup: discarding lookahead token '/' ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -189118,121 +186560,15 @@
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 27
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
-Next token is token '=' ()
-syntax error
-Error: popping nterm exp (2)
-Error: popping token '=' ()
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
 Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1482:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-./calc.at:1482:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error
-Cleanup: discarding lookahead token "end of input" ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error
-Cleanup: discarding lookahead token "end of input" ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1482:  $PREPARSER ./calc  input
+Cleanup: discarding lookahead token '/' ()
 stderr:
 Starting parse
 Entering state 0
@@ -189730,6 +187066,20 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1486: cat stderr
+input:
+  | error
+./calc.at:1486:  $PREPARSER ./calc  input
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -189740,11 +187090,36 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" ()
 ./calc.at:1482: cat stderr
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -189858,6 +187233,7 @@
 Cleanup: popping nterm input ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1486: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -189969,6 +187345,40 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+input:
+  | 1 = 2 = 3
+./calc.at:1486:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -189979,11 +187389,52 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
 ./calc.at:1482: cat stderr
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1482:  $PREPARSER ./calc  input
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
+./calc.at:1486: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -190218,6 +187669,31 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+input:
+  | 
+  | +1
+./calc.at:1486:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -190228,10 +187704,41 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
 ./calc.at:1482: cat stderr
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | (* *) + (*) + (*)
 ./calc.at:1482:  $PREPARSER ./calc  input
+./calc.at:1486: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -190349,7 +187856,16 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486:  $PREPARSER ./calc  /dev/null
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -190465,6 +187981,13 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" ()
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -190475,10 +187998,24 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1482: cat stderr
+./calc.at:1486: cat stderr
 input:
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1482:  $PREPARSER ./calc  input
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -190545,7 +188082,256 @@
    $2 = token '+' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+stderr:
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (3)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 30
+Reading a token
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1)
+   $2 = token '*' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (3333)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4444)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4444)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -190612,7 +188398,255 @@
    $2 = token '+' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+stderr:
 input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (3)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 30
+Reading a token
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1)
+   $2 = token '*' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (3333)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4444)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4444)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
   | 1 + 2 * 3 + !- ++
 ./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
@@ -190748,6 +188782,17 @@
    $2 = token '-' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1486: cat stderr
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -190758,10 +188803,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1482: cat stderr
 input:
-  | (#) + (#) = 2222
-./calc.at:1482:  $PREPARSER ./calc  input
+./calc.at:1482: cat stderr
+  | (!!) + (1 2) = 1
+./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -190770,16 +188815,20 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' ()
+   $2 = token '!' ()
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
@@ -190797,12 +188846,21 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token "number" (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
+Next token is token "number" (2)
+Error: discarding token "number" (2)
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -190827,19 +188885,20 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (2222)
-Shifting token "number" (2222)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2222)
--> $$ = nterm exp (2222)
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (2222)
+   $3 = nterm exp (1)
+error: 2222 != 1
 -> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
@@ -190860,8 +188919,11 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
+  | (#) + (#) = 2222
+./calc.at:1482:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -190869,16 +188931,20 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' ()
+   $2 = token '!' ()
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
@@ -190896,12 +188962,21 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token "number" (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
+Next token is token "number" (2)
+Error: discarding token "number" (2)
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -190926,19 +189001,20 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (2222)
-Shifting token "number" (2222)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2222)
--> $$ = nterm exp (2222)
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (2222)
+   $3 = nterm exp (1)
+error: 2222 != 1
 -> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
@@ -190959,20 +189035,6 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -190981,22 +189043,35 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
 syntax error: invalid character: '#'
 Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
 Next token is token error ()
@@ -191012,32 +189087,39 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
+Next token is token "number" (2222)
+Shifting token "number" (2222)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
+   $1 = token "number" (2222)
+-> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -191060,22 +189142,35 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
 syntax error: invalid character: '#'
 Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
 Next token is token error ()
@@ -191091,32 +189186,39 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
+Next token is token "number" (2222)
+Shifting token "number" (2222)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
+   $1 = token "number" (2222)
+-> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -191130,6 +189232,17 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1486: cat stderr
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -191140,10 +189253,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1482: cat stderr
 input:
-  | (# + 1) = 1111
-./calc.at:1482:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1482: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -191152,18 +189265,57 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' ()
+   $2 = token error ()
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
 Next token is token '+' ()
-Error: discarding token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
 Next token is token "number" (1)
-Error: discarding token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token "number" (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token "number" (2)
+Error: discarding token "number" (2)
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -191175,32 +189327,40 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -191214,8 +189374,9 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -191223,18 +189384,57 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' ()
+   $2 = token error ()
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
 Next token is token '+' ()
-Error: discarding token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
 Next token is token "number" (1)
-Error: discarding token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token "number" (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token "number" (2)
+Error: discarding token "number" (2)
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -191246,32 +189446,40 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -191285,19 +189493,7 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1482: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (1 + # + 1) = 1111
+  | (1 + #) = 1111
 ./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
 Starting parse
@@ -191328,12 +189524,6 @@
 Next token is token error ()
 Error: discarding token error ()
 Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
@@ -191384,6 +189574,16 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -191413,12 +189613,6 @@
 Next token is token error ()
 Error: discarding token error ()
 Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
@@ -191468,6 +189662,10 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+./calc.at:1486: cat stderr
+input:
+  | (* *) + (*) + (*)
+./calc.at:1486:  $PREPARSER ./calc  input
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -191478,10 +189676,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1482: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1482:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -191490,101 +189684,101 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
-   $2 = nterm exp (2)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 28
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
-   $2 = nterm exp (0)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
+   $1 = nterm exp (3333)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -191598,7 +189792,7 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -191607,101 +189801,256 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
+   $1 = nterm exp (1111)
    $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
-   $2 = nterm exp (2)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (2)
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
 Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (3333)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: cat stderr
+input:
+  | (# + 1) = 1111
+./calc.at:1482:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
 Next token is token "number" (1)
-Shifting token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
 Reading a token
 Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 28
+Error: discarding token "number" (1)
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
-   $2 = nterm exp (0)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -191715,6 +190064,10 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+./calc.at:1486: cat stderr
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1486:  $PREPARSER ./calc  input
 ./calc.at:1482: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -191725,51 +190078,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1482: cat stderr
-557. calc.at:1482:  ok
-
-567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1492: mv calc.y.tmp calc.y
-
-./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1492: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-stderr:
-stdout:
-./calc.at:1485: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -191777,11 +190094,11 @@
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 29
 Reading a token
@@ -191789,65 +190106,54 @@
 Shifting token '*' ()
 Entering state 21
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 105):
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
 -> $$ = nterm exp (7)
 Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token number (7)
-Shifting token number (7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (7)
--> $$ = nterm exp (7)
-Entering state 27
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (7)
-   $2 = token '=' ()
-   $3 = nterm exp (7)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (7)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -191855,11 +190161,503 @@
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1482: cat stderr
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1486:  $PREPARSER ./calc  input
+stderr:
+input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+  | (1 + # + 1) = 1111
+stdout:
+./calc.at:1482:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1489: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+stderr:
+input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1486: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (3)
+Shifting token "number" (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (7)
+Shifting token "number" (7)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (7)
+-> $$ = nterm exp (7)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (7)
+   $2 = token '=' ()
+   $3 = nterm exp (7)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (7)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 29
 Reading a token
@@ -191871,29 +190669,29 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (3)
 -> $$ = nterm exp (-3)
 Entering state 30
 Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 105):
+Reducing stack 0 by rule 9 (line 92):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (-3)
 -> $$ = nterm exp (-6)
 Entering state 29
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (-6)
@@ -191907,22 +190705,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (5)
-Shifting token number (5)
+Next token is token "number" (5)
+Shifting token "number" (5)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (5)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (5)
 -> $$ = nterm exp (5)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (5)
 -> $$ = nterm exp (-5)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (-5)
    $2 = token '=' ()
    $3 = nterm exp (-5)
@@ -191931,12 +190729,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (-5)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -191945,11 +190743,11 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 87):
+Reducing stack 0 by rule 3 (line 74):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -191959,11 +190757,11 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
@@ -191971,23 +190769,23 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (1)
    $2 = token '^' ()
    $3 = nterm exp (2)
 -> $$ = nterm exp (1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -192000,22 +190798,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (-1)
    $2 = token '=' ()
    $3 = nterm exp (-1)
@@ -192024,12 +190822,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192043,16 +190841,16 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -192060,7 +190858,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
    $2 = nterm exp (-1)
    $3 = token ')' ()
@@ -192071,16 +190869,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (-1)
    $2 = token '^' ()
    $3 = nterm exp (2)
@@ -192090,16 +190888,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (1)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -192108,12 +190906,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192122,11 +190920,11 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 87):
+Reducing stack 0 by rule 3 (line 74):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192144,28 +190942,28 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (-1)
 -> $$ = nterm exp (1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -192178,22 +190976,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (-1)
    $2 = token '=' ()
    $3 = nterm exp (-1)
@@ -192202,12 +191000,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192216,21 +191014,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 87):
+Reducing stack 0 by rule 3 (line 74):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -192238,16 +191036,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 28
 Reading a token
 Next token is token '-' ()
-Reducing stack 0 by rule 8 (line 104):
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (2)
@@ -192257,16 +191055,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 104):
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (-1)
    $2 = token '-' ()
    $3 = nterm exp (3)
@@ -192280,22 +191078,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (4)
-Shifting token number (4)
+Next token is token "number" (4)
+Shifting token "number" (4)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (4)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (4)
 -> $$ = nterm exp (4)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (4)
 -> $$ = nterm exp (-4)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (-4)
    $2 = token '=' ()
    $3 = nterm exp (-4)
@@ -192304,22 +191102,22 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (-4)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -192331,11 +191129,11 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 12
 Reading a token
@@ -192343,16 +191141,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 28
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 104):
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (2)
    $2 = token '-' ()
    $3 = nterm exp (3)
@@ -192361,7 +191159,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
    $2 = nterm exp (-1)
    $3 = token ')' ()
@@ -192369,7 +191167,7 @@
 Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 104):
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (-1)
@@ -192379,16 +191177,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (2)
    $2 = token '=' ()
    $3 = nterm exp (2)
@@ -192397,12 +191195,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192411,21 +191209,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 87):
+Reducing stack 0 by rule 3 (line 74):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 8
 Reading a token
@@ -192433,11 +191231,11 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
@@ -192445,23 +191243,23 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (8)
 Entering state 32
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (8)
@@ -192471,16 +191269,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (256)
-Shifting token number (256)
+Next token is token "number" (256)
+Shifting token "number" (256)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (256)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (256)
 -> $$ = nterm exp (256)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (256)
    $2 = token '=' ()
    $3 = nterm exp (256)
@@ -192489,12 +191287,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (256)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192504,11 +191302,11 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 12
 Reading a token
@@ -192516,16 +191314,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (2)
@@ -192534,7 +191332,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
    $2 = nterm exp (4)
    $3 = token ')' ()
@@ -192545,16 +191343,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (4)
    $2 = token '^' ()
    $3 = nterm exp (3)
@@ -192564,16 +191362,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (64)
-Shifting token number (64)
+Next token is token "number" (64)
+Shifting token "number" (64)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (64)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (64)
 -> $$ = nterm exp (64)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (64)
    $2 = token '=' ()
    $3 = nterm exp (64)
@@ -192582,32 +191380,46 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (64)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (#) + (#) = 2222
+./calc.at:1486:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -192615,11 +191427,11 @@
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 29
 Reading a token
@@ -192627,23 +191439,23 @@
 Shifting token '*' ()
 Entering state 21
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 105):
+Reducing stack 0 by rule 9 (line 92):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -192653,16 +191465,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (7)
-Shifting token number (7)
+Next token is token "number" (7)
+Shifting token "number" (7)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (7)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (7)
 -> $$ = nterm exp (7)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (7)
    $2 = token '=' ()
    $3 = nterm exp (7)
@@ -192671,21 +191483,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (7)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -192693,11 +191505,11 @@
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 29
 Reading a token
@@ -192709,29 +191521,29 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (3)
 -> $$ = nterm exp (-3)
 Entering state 30
 Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 105):
+Reducing stack 0 by rule 9 (line 92):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (-3)
 -> $$ = nterm exp (-6)
 Entering state 29
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (-6)
@@ -192745,22 +191557,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (5)
-Shifting token number (5)
+Next token is token "number" (5)
+Shifting token "number" (5)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (5)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (5)
 -> $$ = nterm exp (5)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (5)
 -> $$ = nterm exp (-5)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (-5)
    $2 = token '=' ()
    $3 = nterm exp (-5)
@@ -192769,12 +191581,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (-5)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192783,11 +191595,11 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 87):
+Reducing stack 0 by rule 3 (line 74):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192797,11 +191609,11 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
@@ -192809,23 +191621,23 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (1)
    $2 = token '^' ()
    $3 = nterm exp (2)
 -> $$ = nterm exp (1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -192838,22 +191650,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (-1)
    $2 = token '=' ()
    $3 = nterm exp (-1)
@@ -192862,12 +191674,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192881,16 +191693,16 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -192898,7 +191710,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
    $2 = nterm exp (-1)
    $3 = token ')' ()
@@ -192909,16 +191721,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (-1)
    $2 = token '^' ()
    $3 = nterm exp (2)
@@ -192928,16 +191740,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (1)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -192946,12 +191758,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192960,11 +191772,11 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 87):
+Reducing stack 0 by rule 3 (line 74):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -192982,28 +191794,28 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (-1)
 -> $$ = nterm exp (1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -193016,22 +191828,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (-1)
    $2 = token '=' ()
    $3 = nterm exp (-1)
@@ -193040,12 +191852,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -193054,21 +191866,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 87):
+Reducing stack 0 by rule 3 (line 74):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -193076,16 +191888,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 28
 Reading a token
 Next token is token '-' ()
-Reducing stack 0 by rule 8 (line 104):
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (2)
@@ -193095,16 +191907,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 104):
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (-1)
    $2 = token '-' ()
    $3 = nterm exp (3)
@@ -193118,22 +191930,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token number (4)
-Shifting token number (4)
+Next token is token "number" (4)
+Shifting token "number" (4)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (4)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (4)
 -> $$ = nterm exp (4)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
+Reducing stack 0 by rule 11 (line 102):
    $1 = token '-' ()
    $2 = nterm exp (4)
 -> $$ = nterm exp (-4)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (-4)
    $2 = token '=' ()
    $3 = nterm exp (-4)
@@ -193142,22 +191954,22 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (-4)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -193169,11 +191981,11 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 12
 Reading a token
@@ -193181,16 +191993,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 28
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 104):
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (2)
    $2 = token '-' ()
    $3 = nterm exp (3)
@@ -193199,7 +192011,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
    $2 = nterm exp (-1)
    $3 = token ')' ()
@@ -193207,7 +192019,7 @@
 Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 104):
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (-1)
@@ -193217,16 +192029,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (2)
    $2 = token '=' ()
    $3 = nterm exp (2)
@@ -193235,12 +192047,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -193249,21 +192061,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 87):
+Reducing stack 0 by rule 3 (line 74):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 8
 Reading a token
@@ -193271,11 +192083,11 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
@@ -193283,23 +192095,23 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (8)
 Entering state 32
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (8)
@@ -193309,16 +192121,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (256)
-Shifting token number (256)
+Next token is token "number" (256)
+Shifting token "number" (256)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (256)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (256)
 -> $$ = nterm exp (256)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (256)
    $2 = token '=' ()
    $3 = nterm exp (256)
@@ -193327,12 +192139,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (256)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -193342,11 +192154,11 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 12
 Reading a token
@@ -193354,16 +192166,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2)
+Shifting token "number" (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (2)
@@ -193372,7 +192184,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
    $2 = nterm exp (4)
    $3 = token ')' ()
@@ -193383,16 +192195,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
+Next token is token "number" (3)
+Shifting token "number" (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (3)
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Reducing stack 0 by rule 12 (line 103):
    $1 = nterm exp (4)
    $2 = token '^' ()
    $3 = nterm exp (3)
@@ -193402,16 +192214,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (64)
-Shifting token number (64)
+Next token is token "number" (64)
+Shifting token "number" (64)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (64)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (64)
 -> $$ = nterm exp (64)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (64)
    $2 = token '=' ()
    $3 = nterm exp (64)
@@ -193420,303 +192232,256 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (64)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 83):
+Reducing stack 0 by rule 2 (line 70):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-input:
-  | 1 2
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token number (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token number (2)
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token number (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token number (2)
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1485: cat stderr
-input:
-  | 1//2
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
+Next token is token "number" (2222)
+Shifting token "number" (2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2222)
+-> $$ = nterm exp (2222)
+Entering state 27
 Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1485: cat stderr
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
-  | error
-./calc.at:1485:  $PREPARSER ./calc  input
+  | 1 2
+./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1485: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 27
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
 Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (2222)
+Shifting token "number" (2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2222)
+-> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1485: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
+Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1485: cat stderr
-./calc.at:1485:  $PREPARSER ./calc  /dev/null
+Next token is token "number" (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token "number" (2)
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: cat stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Now at end of input.
-syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
-Now at end of input.
-syntax error, unexpected end of file
-Cleanup: discarding lookahead token end of file ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
+Next token is token "number" (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token "number" (2)
+./calc.at:1486: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -193726,10 +192491,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1485: cat stderr
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1485:  $PREPARSER ./calc  input
+  | (1 + 1) / (1 - 1)
+./calc.at:1482:  $PREPARSER ./calc  input
+./calc.at:1486: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -193738,247 +192503,334 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
 Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (2)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
+-> $$ = nterm exp (2)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 29
+Entering state 28
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
    $1 = nterm exp (1)
-   $2 = token '+' ()
+   $2 = token '-' ()
    $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
 -> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (1 + #) = 1111
+./calc.at:1486:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
 Entering state 12
+Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2)
+Next token is token ')' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
--> $$ = nterm exp (3)
+-> $$ = nterm exp (2)
 Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
-Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (2)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+-> $$ = nterm exp (2)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 28
 Reading a token
 Next token is token ')' ()
-Entering state 11
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (0)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+-> $$ = nterm exp (0)
+Entering state 31
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+stderr:
+./calc.at:1489: cat stderr
+Starting parse
+Entering state 0
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 30
-Reading a token
-Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (1)
-   $2 = token '*' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' ()
-Error: popping nterm exp (2)
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (3333)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
 Entering state 8
+Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (4444)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (4444)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -193987,247 +192839,321 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+input:
+  | 1//2
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 29
+Entering state 8
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1482: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (3)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Entering state 8
 Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+557. calc.at:1482:  ok
+./calc.at:1486: cat stderr
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | (# + 1) = 1111
+./calc.at:1486:  $PREPARSER ./calc  input
+
+./calc.at:1489: cat stderr
+stderr:
+Starting parse
+Entering state 0
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '+' ()
+Error: discarding token '+' ()
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token "number" (1)
+Error: discarding token "number" (1)
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
 Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 30
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+  | error
+./calc.at:1489:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (1)
-   $2 = token '*' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' ()
-Error: popping nterm exp (2)
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (3333)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
 Entering state 8
+Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (4444)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (4444)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" ()
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1486: cat stderr
+./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -194237,10 +193163,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1485: cat stderr
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1489: cat stderr
+  | (1 + # + 1) = 1111
+./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -194249,111 +193175,86 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 120):
-   $1 = token '!' ()
-   $2 = token '!' ()
-Shifting token error ()
-Entering state 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token number (2)
-syntax error, unexpected number
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
 Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
 Entering state 8
+Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 = 2 = 3
+./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -194362,111 +193263,119 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 120):
-   $1 = token '!' ()
-   $2 = token '!' ()
-Shifting token error ()
-Entering state 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token number (2)
-syntax error, unexpected number
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
 Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
 Entering state 8
+Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1491: mv calc.y.tmp calc.y
+
+./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+stderr:
+./calc.at:1486: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -194476,10 +193385,50 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1485: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+./calc.at:1486: cat stderr
 input:
-  | (- *) + (1 2) = 1
-./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (1 + 1) / (1 - 1)
+./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1489: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -194488,116 +193437,115 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 119):
-   $1 = token '-' ()
-   $2 = token error ()
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
 Reading a token
 Next token is token ')' ()
-Entering state 11
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (2)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
+-> $$ = nterm exp (2)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
-Next token is token number (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 28
 Reading a token
 Next token is token ')' ()
-Entering state 11
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (0)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 27
+-> $$ = nterm exp (0)
+Entering state 31
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -194606,116 +193554,115 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 119):
-   $1 = token '-' ()
-   $2 = token error ()
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
 Reading a token
 Next token is token ')' ()
-Entering state 11
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (2)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
+-> $$ = nterm exp (2)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
-Next token is token number (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 28
 Reading a token
 Next token is token ')' ()
-Entering state 11
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' ()
-   $2 = token error ()
+   $2 = nterm exp (0)
    $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 27
+-> $$ = nterm exp (0)
+Entering state 31
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
+./calc.at:1486: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -194725,10 +193672,100 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1485: cat stderr
 input:
-  | (* *) + (*) + (*)
-./calc.at:1485:  $PREPARSER ./calc  input
+  | 
+  | +1
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+560. calc.at:1486:  ok
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+
+./calc.at:1489: cat stderr
+./calc.at:1489:  $PREPARSER ./calc  /dev/null
+stderr:
+./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" ()
+567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1492: mv calc.y.tmp calc.y
+
+./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1489: cat stderr
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -194737,29 +193774,102 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (3)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
@@ -194774,12 +193884,18 @@
 Next token is token '*' ()
 Error: discarding token '*' ()
 Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -194787,11 +193903,11 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+-> $$ = nterm exp (3333)
 Entering state 8
 Next token is token '+' ()
 Shifting token '+' ()
@@ -194801,8 +193917,41 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 30
+Reading a token
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1)
+   $2 = token '*' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
 Next token is token '*' ()
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
 Shifting token error ()
 Entering state 11
 Next token is token '*' ()
@@ -194813,52 +193962,59 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
 Entering state 29
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (3333)
    $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4444)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (3333)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-stderr:
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./calc.at:1489: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -194867,29 +194023,102 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (3)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
@@ -194904,12 +194133,18 @@
 Next token is token '*' ()
 Error: discarding token '*' ()
 Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -194917,11 +194152,11 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+-> $$ = nterm exp (3333)
 Entering state 8
 Next token is token '+' ()
 Shifting token '+' ()
@@ -194931,8 +194166,41 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token "number" (2)
+Shifting token "number" (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2)
+-> $$ = nterm exp (2)
+Entering state 30
+Reading a token
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1)
+   $2 = token '*' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
 Next token is token '*' ()
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
 Shifting token error ()
 Entering state 11
 Next token is token '*' ()
@@ -194943,38 +194211,82 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
 Entering state 29
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (3333)
    $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4444)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (3333)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+./calc.at:1492: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stdout:
+./calc.at:1485: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -194989,26 +194301,17 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
-./calc.at:1489:  $PREPARSER ./calc  input
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1489: cat stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -195016,11 +194319,11 @@
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
 Reading a token
@@ -195028,23 +194331,23 @@
 Shifting token '*' ()
 Entering state 21
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -195054,16 +194357,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (7)
-Shifting token "number" (7)
+Next token is token number (7)
+Shifting token number (7)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (7)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (7)
 -> $$ = nterm exp (7)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (7)
    $2 = token '=' ()
    $3 = nterm exp (7)
@@ -195072,21 +194375,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (7)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -195094,11 +194397,11 @@
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
 Reading a token
@@ -195110,29 +194413,29 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (3)
 -> $$ = nterm exp (-3)
 Entering state 30
 Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (-3)
 -> $$ = nterm exp (-6)
 Entering state 29
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (-6)
@@ -195146,22 +194449,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (5)
-Shifting token "number" (5)
+Next token is token number (5)
+Shifting token number (5)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (5)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (5)
 -> $$ = nterm exp (5)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (5)
 -> $$ = nterm exp (-5)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (-5)
    $2 = token '=' ()
    $3 = nterm exp (-5)
@@ -195170,12 +194473,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (-5)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -195184,11 +194487,11 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 74):
+Reducing stack 0 by rule 3 (line 87):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -195198,11 +194501,11 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
@@ -195210,23 +194513,23 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (1)
    $2 = token '^' ()
    $3 = nterm exp (2)
 -> $$ = nterm exp (1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -195239,22 +194542,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (-1)
    $2 = token '=' ()
    $3 = nterm exp (-1)
@@ -195263,12 +194566,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -195282,16 +194585,16 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -195299,7 +194602,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (-1)
    $3 = token ')' ()
@@ -195310,16 +194613,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (-1)
    $2 = token '^' ()
    $3 = nterm exp (2)
@@ -195329,16 +194632,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -195347,12 +194650,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -195361,11 +194664,11 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 74):
+Reducing stack 0 by rule 3 (line 87):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -195383,28 +194686,28 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (-1)
 -> $$ = nterm exp (1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -195417,22 +194720,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (-1)
    $2 = token '=' ()
    $3 = nterm exp (-1)
@@ -195441,12 +194744,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -195455,21 +194758,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 74):
+Reducing stack 0 by rule 3 (line 87):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -195477,16 +194780,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 28
 Reading a token
 Next token is token '-' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (2)
@@ -195496,16 +194799,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (-1)
    $2 = token '-' ()
    $3 = nterm exp (3)
@@ -195519,22 +194822,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (4)
-Shifting token "number" (4)
+Next token is token number (4)
+Shifting token number (4)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (4)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (4)
 -> $$ = nterm exp (4)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (4)
 -> $$ = nterm exp (-4)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (-4)
    $2 = token '=' ()
    $3 = nterm exp (-4)
@@ -195543,22 +194846,22 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (-4)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -195570,11 +194873,11 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 12
 Reading a token
@@ -195582,16 +194885,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 28
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (2)
    $2 = token '-' ()
    $3 = nterm exp (3)
@@ -195600,7 +194903,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (-1)
    $3 = token ')' ()
@@ -195608,7 +194911,7 @@
 Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (-1)
@@ -195618,16 +194921,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2)
    $2 = token '=' ()
    $3 = nterm exp (2)
@@ -195636,12 +194939,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -195650,21 +194953,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 74):
+Reducing stack 0 by rule 3 (line 87):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 8
 Reading a token
@@ -195672,11 +194975,11 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
@@ -195684,23 +194987,23 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (8)
 Entering state 32
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (8)
@@ -195710,16 +195013,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (256)
-Shifting token "number" (256)
+Next token is token number (256)
+Shifting token number (256)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (256)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (256)
 -> $$ = nterm exp (256)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (256)
    $2 = token '=' ()
    $3 = nterm exp (256)
@@ -195728,12 +195031,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (256)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -195743,11 +195046,11 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 12
 Reading a token
@@ -195755,16 +195058,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (2)
@@ -195773,7 +195076,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (4)
    $3 = token ')' ()
@@ -195784,16 +195087,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (4)
    $2 = token '^' ()
    $3 = nterm exp (3)
@@ -195803,16 +195106,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (64)
-Shifting token "number" (64)
+Next token is token number (64)
+Shifting token number (64)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (64)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (64)
 -> $$ = nterm exp (64)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (64)
    $2 = token '=' ()
    $3 = nterm exp (64)
@@ -195821,33 +195124,33 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (64)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token "end of input" ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1485: cat stderr
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -195855,11 +195158,11 @@
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
 Reading a token
@@ -195867,23 +195170,23 @@
 Shifting token '*' ()
 Entering state 21
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -195893,16 +195196,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (7)
-Shifting token "number" (7)
+Next token is token number (7)
+Shifting token number (7)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (7)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (7)
 -> $$ = nterm exp (7)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (7)
    $2 = token '=' ()
    $3 = nterm exp (7)
@@ -195911,21 +195214,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (7)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -195933,11 +195236,11 @@
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
 Reading a token
@@ -195949,29 +195252,29 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (3)
 -> $$ = nterm exp (-3)
 Entering state 30
 Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (-3)
 -> $$ = nterm exp (-6)
 Entering state 29
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (-6)
@@ -195985,22 +195288,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (5)
-Shifting token "number" (5)
+Next token is token number (5)
+Shifting token number (5)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (5)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (5)
 -> $$ = nterm exp (5)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (5)
 -> $$ = nterm exp (-5)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (-5)
    $2 = token '=' ()
    $3 = nterm exp (-5)
@@ -196009,12 +195312,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (-5)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -196023,11 +195326,11 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 74):
+Reducing stack 0 by rule 3 (line 87):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -196037,11 +195340,11 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
@@ -196049,23 +195352,23 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (1)
    $2 = token '^' ()
    $3 = nterm exp (2)
 -> $$ = nterm exp (1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -196078,22 +195381,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (-1)
    $2 = token '=' ()
    $3 = nterm exp (-1)
@@ -196102,12 +195405,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -196121,16 +195424,16 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -196138,7 +195441,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (-1)
    $3 = token ')' ()
@@ -196149,16 +195452,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (-1)
    $2 = token '^' ()
    $3 = nterm exp (2)
@@ -196168,16 +195471,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -196186,12 +195489,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -196200,11 +195503,11 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 74):
+Reducing stack 0 by rule 3 (line 87):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -196222,28 +195525,28 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (-1)
 -> $$ = nterm exp (1)
 Entering state 10
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
@@ -196256,22 +195559,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (1)
 -> $$ = nterm exp (-1)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (-1)
    $2 = token '=' ()
    $3 = nterm exp (-1)
@@ -196280,12 +195583,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (-1)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -196294,21 +195597,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 74):
+Reducing stack 0 by rule 3 (line 87):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -196316,16 +195619,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 28
 Reading a token
 Next token is token '-' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (2)
@@ -196335,16 +195638,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (-1)
    $2 = token '-' ()
    $3 = nterm exp (3)
@@ -196358,22 +195661,22 @@
 Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token "number" (4)
-Shifting token "number" (4)
+Next token is token number (4)
+Shifting token number (4)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (4)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (4)
 -> $$ = nterm exp (4)
 Entering state 10
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 102):
+Reducing stack 0 by rule 11 (line 115):
    $1 = token '-' ()
    $2 = nterm exp (4)
 -> $$ = nterm exp (-4)
 Entering state 27
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (-4)
    $2 = token '=' ()
    $3 = nterm exp (-4)
@@ -196382,22 +195685,22 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (-4)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
@@ -196409,11 +195712,11 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 12
 Reading a token
@@ -196421,16 +195724,16 @@
 Shifting token '-' ()
 Entering state 19
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 28
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (2)
    $2 = token '-' ()
    $3 = nterm exp (3)
@@ -196439,7 +195742,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (-1)
    $3 = token ')' ()
@@ -196447,7 +195750,7 @@
 Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (-1)
@@ -196457,16 +195760,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2)
    $2 = token '=' ()
    $3 = nterm exp (2)
@@ -196475,12 +195778,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -196489,21 +195792,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 3
-Reducing stack 0 by rule 3 (line 74):
+Reducing stack 0 by rule 3 (line 87):
    $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 8
 Reading a token
@@ -196511,11 +195814,11 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
@@ -196523,23 +195826,23 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (8)
 Entering state 32
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (8)
@@ -196549,16 +195852,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (256)
-Shifting token "number" (256)
+Next token is token number (256)
+Shifting token number (256)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (256)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (256)
 -> $$ = nterm exp (256)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (256)
    $2 = token '=' ()
    $3 = nterm exp (256)
@@ -196567,12 +195870,12 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (256)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
@@ -196582,11 +195885,11 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 12
 Reading a token
@@ -196594,16 +195897,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 32
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (2)
    $2 = token '^' ()
    $3 = nterm exp (2)
@@ -196612,7 +195915,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (4)
    $3 = token ')' ()
@@ -196623,16 +195926,16 @@
 Shifting token '^' ()
 Entering state 23
 Reading a token
-Next token is token "number" (3)
-Shifting token "number" (3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (3)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 32
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 103):
+Reducing stack 0 by rule 12 (line 116):
    $1 = nterm exp (4)
    $2 = token '^' ()
    $3 = nterm exp (3)
@@ -196642,16 +195945,16 @@
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (64)
-Shifting token "number" (64)
+Next token is token number (64)
+Shifting token number (64)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (64)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (64)
 -> $$ = nterm exp (64)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (64)
    $2 = token '=' ()
    $3 = nterm exp (64)
@@ -196660,2077 +195963,26 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (64)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 17
-Reducing stack 0 by rule 2 (line 70):
+Reducing stack 0 by rule 2 (line 83):
    $1 = nterm input ()
    $2 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-input:
-  | 1 2
-./calc.at:1489:  $PREPARSER ./calc  input
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Reducing stack 0 by rule 17 (line 121):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token "number" (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token "number" (2)
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Reducing stack 0 by rule 17 (line 121):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-input:
-stderr:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1485:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token "number" (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token "number" (2)
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Reducing stack 0 by rule 18 (line 122):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Reducing stack 0 by rule 18 (line 122):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1489: cat stderr
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-  | 1//2
-./calc.at:1485: cat stderr
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-input:
-  | (#) + (#) = 2222
-./calc.at:1485:  $PREPARSER ./calc  input
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1489: cat stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (2222)
-Shifting token number (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (2222)
-Shifting token number (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-input:
-  | error
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1485: cat stderr
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-input:
-./calc.at:1489: cat stderr
-  | (1 + #) = 1111
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 27
-Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 27
-Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1485: cat stderr
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1485:  $PREPARSER ./calc  input
-input:
-stderr:
-  | 
-  | +1
-./calc.at:1489:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1485: cat stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1485:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1489: cat stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
 Shifting token end of file ()
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1489:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" ()
-./calc.at:1485: cat stderr
 input:
-  | (1 + 1) / (1 - 1)
+  | 1 2
 ./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 106):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 106):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (3)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 30
-Reading a token
-Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1)
-   $2 = token '*' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' ()
-Error: popping nterm exp (2)
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (3333)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (4444)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (4444)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1485: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (3)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token "number" (2)
-Shifting token "number" (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2)
--> $$ = nterm exp (2)
-Entering state 30
-Reading a token
-Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1)
-   $2 = token '*' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' ()
-Error: popping nterm exp (2)
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (3333)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (4444)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (4444)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1485: cat stderr
-559. calc.at:1485:  ok
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-
-input:
   | (!!) + (1 2) = 1
 ./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
@@ -198845,8 +196097,25 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token number (2)
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -198958,6 +196227,21 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token number (2)
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -198968,11 +196252,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1489: cat stderr
+./calc.at:1485: cat stderr
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
+input:
+  | 1//2
+./calc.at:1485:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -199090,9 +196388,28 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1492: mv calc.y.tmp calc.y
-
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -199210,7 +196527,27 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -199222,9 +196559,24 @@
   }eg
 ' expout || exit 77
 ./calc.at:1489: cat stderr
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1485: cat stderr
 input:
   | (* *) + (*) + (*)
 ./calc.at:1489:  $PREPARSER ./calc  input
+input:
+  | error
+stderr:
+./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -199342,10 +196694,24 @@
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
 Reading a token
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
+Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
@@ -199458,6 +196824,16 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -199468,12 +196844,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
 ./calc.at:1489: cat stderr
+./calc.at:1485: cat stderr
 input:
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -199540,6 +196917,8 @@
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
+./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -199606,9 +196985,70 @@
    $2 = token '+' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | 1 + 2 * 3 + !- ++
+stderr:
 ./calc.at:1489:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
 stderr:
 Starting parse
 Entering state 0
@@ -199742,6 +197182,17 @@
    $2 = token '-' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1485: cat stderr
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -199754,107 +197205,46 @@
 ' expout || exit 77
 ./calc.at:1489: cat stderr
 input:
+stderr:
+  | 
+  | +1
+./calc.at:1485:  $PREPARSER ./calc  input
+stdout:
+input:
   | (#) + (#) = 2222
 ./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1486: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (2222)
-Shifting token "number" (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -199953,98 +197343,8 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
 input:
-  | (1 + #) = 1111
-./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -200054,98 +197354,6 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
 syntax error: invalid character: '#'
 Next token is token error ()
 Shifting token error ()
@@ -200153,12 +197361,6 @@
 Next token is token error ()
 Error: discarding token error ()
 Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
@@ -200171,163 +197373,21 @@
 -> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
 Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
 syntax error: invalid character: '#'
 Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
 Next token is token error ()
 Error: discarding token error ()
 Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
@@ -200338,364 +197398,39 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1111)
-   $2 = token '=' ()
+   $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token "number" (1)
-Error: discarding token "number" (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
-Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1111)
-Shifting token "number" (1111)
+Next token is token "number" (2222)
+Shifting token "number" (2222)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1111)
--> $$ = nterm exp (1111)
+   $1 = token "number" (2222)
+-> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" ()
-Entering state 16
-Cleanup: popping token "end of input" ()
-Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token "number" (1)
-Shifting token "number" (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2)
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -200709,38 +197444,6 @@
 Entering state 16
 Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-563. calc.at:1489:  ok
-
-569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
-./calc.at:1494: mv calc.y.tmp calc.y
-
-./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1494: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-stderr:
-stdout:
-./calc.at:1486: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
-input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -200754,6 +197457,25 @@
   | 
   | 2^2^3 = 256
   | (2^2)^3 = 64
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
 ./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
 Starting parse
@@ -201594,6 +198316,16 @@
 Cleanup: popping nterm input ()
 ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -202430,9 +199162,30 @@
 Entering state 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
+./calc.at:1489: cat stderr
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
+./calc.at:1485: cat stderr
   | 1 2
 ./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1485:  $PREPARSER ./calc  /dev/null
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -202449,7 +199202,11 @@
 syntax error, unexpected number
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token number (2)
+input:
 ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + #) = 1111
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -202466,212 +199223,157 @@
 syntax error, unexpected number
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token number (2)
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | 1//2
-./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Now at end of input.
+syntax error, unexpected end of file
+Cleanup: discarding lookahead token end of file ()
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
-Entering state 8
+Entering state 12
 Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
 Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | error
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (2)
--> $$ = nterm exp (2)
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (2)
--> $$ = nterm exp (2)
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
 Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -202679,11 +199381,12 @@
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -202693,23 +199396,6 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1486: cat stderr
-./calc.at:1486:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input ()
 ./calc.at:1486: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -202720,11 +199406,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1485: cat stderr
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1486: cat stderr
 input:
+./calc.at:1489: cat stderr
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
+input:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -202739,7 +199439,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -202757,7 +199457,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -202769,13 +199469,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -202788,13 +199488,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -202813,7 +199513,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -202821,7 +199521,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -202853,7 +199553,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -202861,7 +199561,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -202878,7 +199578,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -202890,13 +199590,13 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 30
 Reading a token
 Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (1)
    $2 = token '*' ()
    $3 = nterm exp (2)
@@ -202920,7 +199620,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -202928,7 +199628,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (3333)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -202941,13 +199641,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (4444)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -202957,22 +199657,24 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (# + 1) = 1111
+./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -202988,7 +199690,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203006,7 +199708,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -203018,13 +199720,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -203037,13 +199739,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -203062,7 +199764,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203070,7 +199772,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203102,7 +199804,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203110,7 +199812,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203127,7 +199829,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -203139,13 +199841,13 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 30
 Reading a token
 Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (1)
    $2 = token '*' ()
    $3 = nterm exp (2)
@@ -203169,7 +199871,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203177,7 +199879,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (3333)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203190,13 +199892,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (4444)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -203206,36 +199908,46 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | (!!) + (1 2) = 1
+  | 1//2
 ./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -203243,19 +199955,21 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' ()
-   $2 = token '!' ()
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
@@ -203266,13 +199980,47 @@
 -> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
@@ -203280,15 +200028,47 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 12
+Entering state 8
 Reading a token
-Next token is token number (2)
-syntax error, unexpected number
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
 Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
+stderr:
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -203300,40 +200080,32 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
 Entering state 8
+Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -203343,12 +200115,37 @@
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485: cat stderr
+input:
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (!!) + (1 2) = 1
+./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
+./calc.at:1486: cat stderr
+./calc.at:1489: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -203363,7 +200160,7 @@
 Next token is token '!' ()
 Shifting token '!' ()
 Entering state 15
-Reducing stack 0 by rule 16 (line 107):
+Reducing stack 0 by rule 16 (line 120):
    $1 = token '!' ()
    $2 = token '!' ()
 Shifting token error ()
@@ -203372,7 +200169,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203390,7 +200187,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -203408,7 +200205,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203416,7 +200213,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203429,13 +200226,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -203445,35 +200242,22 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -203482,28 +200266,23 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' ()
-   $2 = token error ()
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 120):
+   $1 = token '!' ()
+   $2 = token '!' ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203521,7 +200300,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -203539,7 +200318,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203547,7 +200326,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203560,13 +200339,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -203576,81 +200355,69 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | error
+./calc.at:1486:  $PREPARSER ./calc  input
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' ()
-   $2 = token error ()
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
+stderr:
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1)
+Shifting token "number" (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
+   $1 = token "number" (1)
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
-Next token is token number (2)
-syntax error, unexpected number
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
 Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -203662,40 +200429,32 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
 Entering state 8
+Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1111)
+Shifting token "number" (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -203705,11 +200464,12 @@
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token "end of input" ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token "end of input" ()
 Cleanup: popping nterm input ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -203719,10 +200479,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1486: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1486:  $PREPARSER ./calc  input
+stderr:
+./calc.at:1485: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
 stderr:
 Starting parse
 Entering state 0
@@ -203731,17 +200495,34 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token ')' ()
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token "number" (1)
+Error: discarding token "number" (1)
+Reading a token
+Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
@@ -203753,6 +200534,103 @@
 -> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token "number" (1111)
+Shifting token "number" (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1489: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 119):
+   $1 = token '-' ()
+   $2 = token error ()
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
@@ -203761,9 +200639,102 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+./calc.at:1486: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
 Next token is token '*' ()
 syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 119):
+   $1 = token '-' ()
+   $2 = token error ()
+Shifting token error ()
 Entering state 11
 Next token is token '*' ()
 Error: discarding token '*' ()
@@ -203773,15 +200744,488 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
 Entering state 29
 Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+  | (1 + 1) / (1 - 1)
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+input:
+  | 1 = 2 = 3
+./calc.at:1486:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
 Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token ')' ()
 Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token "number" (1)
+Shifting token "number" (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" ()
+Entering state 16
+Cleanup: popping token "end of input" ()
+Cleanup: popping nterm input ()
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1485: cat stderr
+input:
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | (* *) + (*) + (*)
+./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1489: cat stderr
+./calc.at:1486: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203807,7 +201251,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203815,7 +201259,7 @@
 Entering state 29
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203824,22 +201268,27 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (3333)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+563. calc.at:1489:  ok
+input:
+  | 
+  | +1
+./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -203863,7 +201312,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203890,7 +201339,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203898,7 +201347,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203924,7 +201373,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -203932,7 +201381,7 @@
 Entering state 29
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -203941,22 +201390,44 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (3333)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1486: "$PERL" -pi -e 'use strict;
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+./calc.at:1485: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -203966,10 +201437,43 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1486: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+./calc.at:1485: cat stderr
 input:
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 1 + 2 * 3 + !+ ++
-./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1485:  $PREPARSER ./calc  input
+./calc.at:1486: cat stderr
+./calc.at:1486:  $PREPARSER ./calc  /dev/null
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -203977,7 +201481,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
@@ -203989,7 +201493,7 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
@@ -204001,20 +201505,20 @@
 Next token is token number (3)
 Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -204031,20 +201535,28 @@
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 14
-Reducing stack 0 by rule 17 (line 108):
+Reducing stack 0 by rule 17 (line 121):
    $1 = token '!' ()
    $2 = token '+' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input ()
 ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
@@ -204056,7 +201568,7 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
@@ -204068,20 +201580,20 @@
 Next token is token number (3)
 Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -204098,22 +201610,32 @@
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 14
-Reducing stack 0 by rule 17 (line 108):
+Reducing stack 0 by rule 17 (line 121):
    $1 = token '!' ()
    $2 = token '+' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1492: mv calc.y.tmp calc.y
+
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input ()
 input:
   | 1 + 2 * 3 + !- ++
-./calc.at:1486:  $PREPARSER ./calc  input
+./calc.at:1485:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 Starting parse
 Entering state 0
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
@@ -204125,7 +201647,7 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
@@ -204137,20 +201659,20 @@
 Next token is token number (3)
 Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -204167,12 +201689,12 @@
 Next token is token '-' ()
 Shifting token '-' ()
 Entering state 13
-Reducing stack 0 by rule 18 (line 109):
+Reducing stack 0 by rule 18 (line 122):
    $1 = token '!' ()
    $2 = token '-' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -204180,7 +201702,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
@@ -204192,7 +201714,7 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
@@ -204204,20 +201726,20 @@
 Next token is token number (3)
 Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -204234,7 +201756,7 @@
 Next token is token '-' ()
 Shifting token '-' ()
 Entering state 13
-Reducing stack 0 by rule 18 (line 109):
+Reducing stack 0 by rule 18 (line 122):
    $1 = token '!' ()
    $2 = token '-' ()
 Cleanup: popping token '+' ()
@@ -204249,39 +201771,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-stdout:
-./calc.at:1487: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc
-
 ./calc.at:1486: cat stderr
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1485: cat stderr
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1487:  $PREPARSER ./calc  input
-input:
-  | (#) + (#) = 2222
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1486:  $PREPARSER ./calc  input
+input:
 stderr:
+  | (#) + (#) = 2222
+./calc.at:1485:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -204289,14 +201797,9 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
 Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
@@ -204316,909 +201819,574 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (2222)
-Shifting token number (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input ()
-Entering state 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 8
+Entering state 12
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 29
 Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1)
    $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (7)
-Shifting token number (7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (7)
--> $$ = nterm exp (7)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (7)
-   $2 = token '=' ()
-   $3 = nterm exp (7)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (7)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (3)
+Entering state 12
+Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 29
 Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 10
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (3)
--> $$ = nterm exp (-3)
-Entering state 30
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (-3)
--> $$ = nterm exp (-6)
-Entering state 29
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (-6)
--> $$ = nterm exp (-5)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token number (5)
-Shifting token number (5)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (5)
--> $$ = nterm exp (5)
-Entering state 10
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (5)
--> $$ = nterm exp (-5)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (-5)
-   $2 = token '=' ()
-   $3 = nterm exp (-5)
--> $$ = nterm exp (-5)
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
 Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (-5)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 10
+Entering state 12
 Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (2)
 -> $$ = nterm exp (2)
-Entering state 32
+Entering state 30
 Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 92):
    $1 = nterm exp (1)
-   $2 = token '^' ()
+   $2 = token '*' ()
    $3 = nterm exp (2)
--> $$ = nterm exp (1)
-Entering state 10
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (3333)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (4444)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
 Entering state 27
+Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (-1)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4444)
    $2 = token '=' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (-1)
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (-1)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 10
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 12
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
-   $2 = nterm exp (-1)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (-1)
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 32
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (-1)
-   $2 = token '^' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token number (2222)
+Shifting token number (2222)
 Entering state 1
 Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token number (2222)
+-> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1)
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (1)
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1)
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (-1)
--> $$ = nterm exp (1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
+Entering state 12
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (-1)
-   $2 = token '=' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (-1)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Entering state 29
 Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 28
-Reading a token
-Next token is token '-' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
+Entering state 29
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
 -> $$ = nterm exp (3)
-Entering state 28
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (-1)
-   $2 = token '-' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (-4)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token number (4)
-Shifting token number (4)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (4)
--> $$ = nterm exp (4)
-Entering state 10
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (4)
--> $$ = nterm exp (-4)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (-4)
-   $2 = token '=' ()
-   $3 = nterm exp (-4)
--> $$ = nterm exp (-4)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (-4)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 12
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 28
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (2)
-   $2 = token '-' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (-1)
-Entering state 12
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
-   $2 = nterm exp (-1)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (-1)
-Entering state 28
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '=' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 32
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 32
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (8)
-Entering state 32
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
-   $3 = nterm exp (8)
--> $$ = nterm exp (256)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (256)
-Shifting token number (256)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (256)
--> $$ = nterm exp (256)
-Entering state 27
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (256)
-   $2 = token '=' ()
-   $3 = nterm exp (256)
--> $$ = nterm exp (256)
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
 Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (256)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 12
 Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (2)
 -> $$ = nterm exp (2)
-Entering state 32
+Entering state 30
 Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1)
+   $2 = token '*' ()
    $3 = nterm exp (2)
--> $$ = nterm exp (4)
+-> $$ = nterm exp (2)
 Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
-   $2 = nterm exp (4)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (4)
-Entering state 8
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 32
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (4)
-   $2 = token '^' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (64)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (3333)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (4444)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (64)
-Shifting token number (64)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (64)
--> $$ = nterm exp (64)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (64)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4444)
    $2 = token '=' ()
-   $3 = nterm exp (64)
--> $$ = nterm exp (64)
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (64)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token end of input ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
 Starting parse
 Entering state 0
 Reading a token
@@ -205238,7 +202406,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -205265,7 +202433,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -205273,7 +202441,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -205286,13 +202454,13 @@
 Next token is token number (2222)
 Shifting token number (2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2222)
 -> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (2222)
@@ -205301,345 +202469,236 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1485: cat stderr
+./calc.at:1486: cat stderr
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1486:  $PREPARSER ./calc  input
+input:
 stderr:
+  | (1 + #) = 1111
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' ()
+   $2 = token '!' ()
+Shifting token error ()
+Entering state 11
 Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
 Reading a token
-Next token is token number (7)
-Shifting token number (7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (7)
--> $$ = nterm exp (7)
-Entering state 27
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (7)
-   $2 = token '=' ()
-   $3 = nterm exp (7)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (7)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
+Entering state 12
 Reading a token
 Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 10
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
 Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (3)
--> $$ = nterm exp (-3)
-Entering state 30
-Next token is token '=' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (-3)
--> $$ = nterm exp (-6)
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 29
+Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
    $2 = token '+' ()
-   $3 = nterm exp (-6)
--> $$ = nterm exp (-5)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token number (5)
-Shifting token number (5)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (5)
--> $$ = nterm exp (5)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (5)
--> $$ = nterm exp (-5)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 27
+Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (-5)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (-5)
--> $$ = nterm exp (-5)
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (-5)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485:  $PREPARSER ./calc  input
+stderr:
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 10
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 32
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' ()
+   $2 = token '!' ()
+Shifting token error ()
+Entering state 11
 Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (1)
-   $2 = token '^' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 10
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 27
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (-1)
-   $2 = token '=' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (-1)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (-1)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 10
+Entering state 12
+Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 12
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
-   $2 = nterm exp (-1)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (-1)
-Entering state 8
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 32
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (-1)
-   $2 = token '^' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
@@ -205648,56 +202707,43 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (1)
--> $$ = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
@@ -205705,81 +202751,78 @@
 Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 10
+Entering state 12
 Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (-1)
--> $$ = nterm exp (1)
-Entering state 10
-Next token is token '=' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
+Reading a token
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token number (1111)
+Shifting token number (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (1)
--> $$ = nterm exp (-1)
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
+Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (-1)
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (-1)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (-1)
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
@@ -205787,364 +202830,278 @@
 Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 8
+Entering state 12
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 28
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token '-' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (-1)
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 28
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (-1)
-   $2 = token '-' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (-4)
-Entering state 8
-Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token number (4)
-Shifting token number (4)
+Next token is token number (1111)
+Shifting token number (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (4)
--> $$ = nterm exp (4)
-Entering state 10
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 11 (line 115):
-   $1 = token '-' ()
-   $2 = nterm exp (4)
--> $$ = nterm exp (-4)
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
+Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (-4)
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (-4)
--> $$ = nterm exp (-4)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (-4)
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1486: cat stderr
+./calc.at:1485: cat stderr
+input:
+  | (- *) + (1 2) = 1
+./calc.at:1486:  $PREPARSER ./calc  input
+input:
+  | (# + 1) = 1111
+./calc.at:1485:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 12
-Reading a token
 Next token is token '-' ()
 Shifting token '-' ()
-Entering state 19
+Entering state 2
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 28
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' ()
+   $2 = token error ()
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (2)
-   $2 = token '-' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (-1)
-Entering state 12
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
-   $2 = nterm exp (-1)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (-1)
-Entering state 28
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (-1)
--> $$ = nterm exp (2)
+-> $$ = nterm exp (1111)
 Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 27
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2)
-   $2 = token '=' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
 Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 32
-Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
 Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 32
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (8)
-Entering state 32
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
-   $3 = nterm exp (8)
--> $$ = nterm exp (256)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (256)
-Shifting token number (256)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (256)
--> $$ = nterm exp (256)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (256)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (256)
--> $$ = nterm exp (256)
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (256)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 12
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
+Next token is token '+' ()
+Error: discarding token '+' ()
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 32
+Next token is token number (1)
+Error: discarding token number (1)
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (2)
-   $2 = token '^' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (4)
-Entering state 12
+Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
-   $2 = nterm exp (4)
+   $2 = token error ()
    $3 = token ')' ()
--> $$ = nterm exp (4)
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '^' ()
-Shifting token '^' ()
-Entering state 23
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 32
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 12 (line 116):
-   $1 = nterm exp (4)
-   $2 = token '^' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (64)
-Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (64)
-Shifting token number (64)
+Next token is token number (1111)
+Shifting token number (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (64)
--> $$ = nterm exp (64)
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (64)
+   $1 = nterm exp (1111)
    $2 = token '=' ()
-   $3 = nterm exp (64)
--> $$ = nterm exp (64)
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (64)
+   $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
-Entering state 17
-Reducing stack 0 by rule 2 (line 83):
-   $1 = nterm input ()
-   $2 = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
@@ -206153,61 +203110,46 @@
 Entering state 16
 Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-input:
-  | 1 2
-./calc.at:1487:  $PREPARSER ./calc  input
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token number (2)
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token number (2)
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-Starting parse
-Entering state 0
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' ()
+   $2 = token error ()
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token number (2)
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token number (2)
-./calc.at:1486: cat stderr
-./calc.at:1487: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-input:
-Starting parse
-Entering state 0
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
@@ -206221,18 +203163,13 @@
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
+Next token is token number (2)
+syntax error, unexpected number
 Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
+Next token is token number (2)
+Error: discarding token number (2)
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -206244,32 +203181,40 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
+Entering state 29
 Reading a token
 Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -206283,10 +203228,7 @@
 Entering state 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-  | 1//2
 stderr:
-./calc.at:1487:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -206294,33 +203236,25 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
 syntax error: invalid character: '#'
 Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
 Next token is token error ()
 Error: discarding token error ()
 Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -206334,13 +203268,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -206349,64 +203283,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
-syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
-syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
 ./calc.at:1486: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -206417,23 +203308,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1487: cat stderr
-input:
 ./calc.at:1486: cat stderr
-  | error
-./calc.at:1487:  $PREPARSER ./calc  input
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1485: cat stderr
 input:
-stderr:
-  | (# + 1) = 1111
+  | (* *) + (*) + (*)
 ./calc.at:1486:  $PREPARSER ./calc  input
-Starting parse
-Entering state 0
-Reading a token
-Next token is token invalid token ()
-syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | (1 + # + 1) = 1111
+./calc.at:1485:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -206441,18 +203334,42 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
 Next token is token '+' ()
-Error: discarding token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -206464,32 +203381,54 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1111)
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 27
+Entering state 29
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
+-> $$ = nterm exp (3333)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (3333)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -206513,8 +203452,22 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
 syntax error: invalid character: '#'
 Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
 Next token is token error ()
@@ -206531,7 +203484,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -206545,13 +203498,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -206560,141 +203513,99 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
-Next token is token invalid token ()
-syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1487: cat stderr
-input:
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1 = 2 = 3
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 27
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token '=' ()
-syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1486: cat stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 27
-Reading a token
-Next token is token '=' ()
-syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1486:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1487: cat stderr
-Starting parse
-Entering state 0
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
 Reading a token
 Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
 Shifting token error ()
 Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
 Entering state 11
@@ -206706,32 +203617,20 @@
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
+Entering state 29
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
+-> $$ = nterm exp (3333)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1111)
+   $1 = nterm exp (3333)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -206745,7 +203644,6 @@
 Entering state 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -206757,7 +203655,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -206786,7 +203684,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -206800,13 +203698,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -206815,66 +203713,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-input:
-  | 
-  | +1
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 87):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
 ./calc.at:1486: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -206885,21 +203738,92 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1486: cat stderr
-./calc.at:1487: cat stderr
-./calc.at:1487:  $PREPARSER ./calc  /dev/null
+./calc.at:1485: cat stderr
 input:
-stderr:
-  | (1 + 1) / (1 - 1)
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1486:  $PREPARSER ./calc  input
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1485:  $PREPARSER ./calc  input
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
-Now at end of input.
-syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-Cleanup: discarding lookahead token end of file ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -206910,7 +203834,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -206922,13 +203846,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -206937,7 +203861,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (2)
    $3 = token ')' ()
@@ -206955,7 +203879,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -206967,13 +203891,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 28
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (1)
@@ -206982,7 +203906,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (0)
    $3 = token ')' ()
@@ -206990,7 +203914,7 @@
 Entering state 31
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
+Reducing stack 0 by rule 10 (line 106):
    $1 = nterm exp (2)
    $2 = token '/' ()
    $3 = nterm exp (0)
@@ -207000,30 +203924,89 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Now at end of input.
-syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
-Cleanup: discarding lookahead token end of file ()
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
 Starting parse
 Entering state 0
 Reading a token
@@ -207034,7 +204017,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -207046,13 +204029,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -207061,7 +204044,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (2)
    $3 = token ')' ()
@@ -207079,7 +204062,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -207091,13 +204074,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 28
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (1)
@@ -207106,7 +204089,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (0)
    $3 = token ')' ()
@@ -207114,7 +204097,7 @@
 Entering state 31
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
+Reducing stack 0 by rule 10 (line 106):
    $1 = nterm exp (2)
    $2 = token '/' ()
    $3 = nterm exp (0)
@@ -207124,180 +204107,211 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-./calc.at:1486: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1487:  $PREPARSER ./calc  input
-./calc.at:1486: cat stderr
+  | 1 + 2 * 3 + !- ++
+./calc.at:1486:  $PREPARSER ./calc  input
 stderr:
-561. calc.at:1486:  ok
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token ')' ()
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 12
+Entering state 8
 Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 29
 Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1)
    $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1485: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 29
+Entering state 8
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (3)
-Entering state 12
-Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token ')' ()
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 29
 Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
    $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
 Entering state 8
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1485: cat stderr
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+559. calc.at:1485:  ok
+./calc.at:1486: cat stderr
+input:
+
+  | (#) + (#) = 2222
+./calc.at:1486:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
 Entering state 8
+Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
@@ -207306,52 +204320,19 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 30
-Reading a token
-Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (1)
-   $2 = token '*' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Error: popping token '*' ()
-Error: popping nterm exp (2)
+syntax error: invalid character: '#'
+Next token is token error ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
 Next token is token ')' ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -207359,536 +204340,63 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (3333)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '=' ()
 Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token number (2222)
+Shifting token number (2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2222)
+-> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (4444)
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
    $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (4444)
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token end of input ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token ')' ()
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (3)
-Entering state 12
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token ')' ()
-syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
-Error: popping token '+' ()
-Error: popping nterm exp (3)
-Shifting token error ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 30
-Reading a token
-Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (1)
-   $2 = token '*' ()
-   $3 = nterm exp (2)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Error: popping token '*' ()
-Error: popping nterm exp (2)
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (3333)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (4444)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (4444)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 4444 != 1
--> $$ = nterm exp (4444)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (4444)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 120):
-   $1 = token '!' ()
-   $2 = token '!' ()
-Shifting token error ()
-Entering state 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token number (2)
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 120):
-   $1 = token '!' ()
-   $2 = token '!' ()
-Shifting token error ()
-Entering state 11
-Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token number (2)
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1487:  $PREPARSER ./calc  input
-570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+stdout:
 stderr:
-./calc.at:1494: mv calc.y.tmp calc.y
+./calc.at:1491: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
 
 Starting parse
 Entering state 0
@@ -207897,856 +204405,6 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 119):
-   $1 = token '-' ()
-   $2 = token error ()
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token number (2)
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 119):
-   $1 = token '-' ()
-   $2 = token error ()
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token number (2)
-syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
-./calc.at:1487: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (3333)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token '*' ()
-syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
-Shifting token error ()
-Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (2222)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (3333)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Reducing stack 0 by rule 17 (line 121):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 14
-Reducing stack 0 by rule 17 (line 121):
-   $1 = token '!' ()
-   $2 = token '+' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Reducing stack 0 by rule 18 (line 122):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 29
-Reading a token
-Next token is token '*' ()
-Shifting token '*' ()
-Entering state 21
-Reading a token
-Next token is token number (3)
-Shifting token number (3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (3)
--> $$ = nterm exp (3)
-Entering state 30
-Reading a token
-Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 105):
-   $1 = nterm exp (2)
-   $2 = token '*' ()
-   $3 = nterm exp (3)
--> $$ = nterm exp (6)
-Entering state 29
-Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (6)
--> $$ = nterm exp (7)
-Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 13
-Reducing stack 0 by rule 18 (line 122):
-   $1 = token '!' ()
-   $2 = token '-' ()
-Cleanup: popping token '+' ()
-Cleanup: popping nterm exp (7)
-./calc.at:1487: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 29
-Reading a token
-Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (2222)
-Shifting token number (2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (2222)
--> $$ = nterm exp (2222)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (2222)
--> $$ = nterm exp (2222)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
 syntax error: invalid character: '#'
 Next token is token error ()
 Shifting token error ()
@@ -208759,7 +204417,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -208786,7 +204444,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 118):
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -208794,7 +204452,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 103):
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -208807,13 +204465,13 @@
 Next token is token number (2222)
 Shifting token number (2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 92):
+Reducing stack 0 by rule 5 (line 79):
    $1 = token number (2222)
 -> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
+Reducing stack 0 by rule 6 (line 80):
    $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (2222)
@@ -208822,757 +204480,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 88):
+Reducing stack 0 by rule 4 (line 75):
    $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-syntax error: invalid character: '#'
-Next token is token error ()
-Error: popping token '+' ()
-Error: popping nterm exp (1)
-Shifting token error ()
-Entering state 11
-Next token is token error ()
-Error: discarding token error ()
-Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
-Reading a token
-Next token is token number (1)
-Error: discarding token number (1)
-Reading a token
-Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 25
-Reducing stack 0 by rule 14 (line 118):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (1111)
-Shifting token number (1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1111)
--> $$ = nterm exp (1111)
-Entering state 27
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 93):
-   $1 = nterm exp (1111)
-   $2 = token '=' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (1111)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (1111)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1487:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 106):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of file ()
-Entering state 16
-Cleanup: popping token end of file ()
-Cleanup: popping nterm input ()
-./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
-Entering state 20
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 29
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 103):
-   $1 = nterm exp (1)
-   $2 = token '+' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (2)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
-   $1 = token '(' ()
-   $2 = nterm exp (2)
-   $3 = token ')' ()
--> $$ = nterm exp (2)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
-Entering state 4
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 12
-Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 19
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 92):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 28
-Reading a token
-Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 104):
-   $1 = nterm exp (1)
-   $2 = token '-' ()
-   $3 = nterm exp (1)
--> $$ = nterm exp (0)
-Entering state 12
-Next token is token ')' ()
-Shifting token ')' ()
-Entering state 26
-Reducing stack 0 by rule 13 (line 117):
-   $1 = token '(' ()
-   $2 = nterm exp (0)
-   $3 = token ')' ()
--> $$ = nterm exp (0)
-Entering state 31
-Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 106):
-   $1 = nterm exp (2)
-   $2 = token '/' ()
-   $3 = nterm exp (0)
-error: null divisor
--> $$ = nterm exp (2)
-Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 24
-Reducing stack 0 by rule 4 (line 88):
-   $1 = nterm exp (2)
-   $2 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 82):
+Reducing stack 0 by rule 1 (line 69):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of file ()
+Shifting token end of input ()
 Entering state 16
-Cleanup: popping token end of file ()
+Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-./calc.at:1487: cat stderr
-562. calc.at:1487:  ok
-
-571. calc.at:1504: testing Calculator lalr1.d  ...
-./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-571. calc.at:1504:  skipped (calc.at:1504)
-
-stderr:
-stdout:
-./calc.at:1491: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
 input:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
@@ -209588,8 +204510,22 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1486: cat stderr
+input:
+  | (1 + #) = 1111
+./calc.at:1486:  $PREPARSER ./calc  input
+stderr:
 stderr:
-572. calc.at:1509: testing Calculator D   ...
 Starting parse
 Entering state 0
 Reading a token
@@ -210426,9 +205362,164 @@
 Entering state 16
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
 Starting parse
 Entering state 0
 Reading a token
@@ -211265,9 +206356,13 @@
 Entering state 16
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
+569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
+./calc.at:1494: mv calc.y.tmp calc.y
+
 input:
   | 1 2
 ./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
 stderr:
 Starting parse
 Entering state 0
@@ -211284,8 +206379,19 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1486: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -211301,6 +206407,9 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
+input:
+  | (# + 1) = 1111
+./calc.at:1486:  $PREPARSER ./calc  input
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -211311,10 +206420,149 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-572. calc.at:1509: ./calc.at:1491: cat stderr
- skipped (calc.at:1509)
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1491: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
 input:
-
   | 1//2
 ./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
@@ -211338,7 +206586,32 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
+stderr:
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./calc.at:1494: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+./calc.at:1487: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc
+
+./calc.at:1486: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -211360,6 +206633,24 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
+input:
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1487:  $PREPARSER ./calc  input
+  | (1 + # + 1) = 1111
+./calc.at:1486:  $PREPARSER ./calc  input
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -211370,1439 +206661,1918 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1491: cat stderr
-input:
-573. calc.at:1510: testing Calculator D %locations  ...
-  | error
-./calc.at:1491:  $PREPARSER ./calc  input
-./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 18
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 27
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' ()
+Error: discarding token '+' ()
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token number (1)
+Error: discarding token number (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token number (1111)
+Shifting token number (1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-573. calc.at:1510:  skipped (calc.at:1510)
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-
-input:
-  | 
-  | +1
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-./calc.at:1491:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
 Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose  ...
-./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
 Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
+Next token is token '=' ()
+Reducing stack 0 by rule 9 (line 105):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
 Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Next token is token number (7)
+Shifting token number (7)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (7)
+-> $$ = nterm exp (7)
+Entering state 27
 Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (7)
+   $2 = token '=' ()
+   $3 = nterm exp (7)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (7)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
+Next token is token '+' ()
+Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 29
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 10
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (3)
+-> $$ = nterm exp (-3)
+Entering state 30
+Next token is token '=' ()
+Reducing stack 0 by rule 9 (line 105):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (-3)
+-> $$ = nterm exp (-6)
 Entering state 29
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (-6)
+-> $$ = nterm exp (-5)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 29
+Next token is token number (5)
+Shifting token number (5)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (5)
+-> $$ = nterm exp (5)
+Entering state 10
 Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (5)
+-> $$ = nterm exp (-5)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (-5)
+   $2 = token '=' ()
+   $3 = nterm exp (-5)
+-> $$ = nterm exp (-5)
 Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 20
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (-5)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
 Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 29
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 32
 Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (1)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
 Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 20
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
 Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 21
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (-1)
+   $2 = token '=' ()
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (-1)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (-1)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 30
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
 Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
+Next token is token ')' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
 Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 21
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 117):
+   $1 = token '(' ()
+   $2 = nterm exp (-1)
+   $3 = token ')' ()
+-> $$ = nterm exp (-1)
+Entering state 8
 Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 29
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 32
 Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (-1)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (1)
 Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (1)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (1)
 Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (1)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
 Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 20
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (-1)
+-> $$ = nterm exp (1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 29
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (-1)
+   $2 = token '=' ()
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (-1)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (-1)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 29
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
 Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 29
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 28
 Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
+Next token is token '-' ()
+Reducing stack 0 by rule 8 (line 104):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (-1)
 Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
 Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 28
 Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
+Next token is token '=' ()
+Reducing stack 0 by rule 8 (line 104):
+   $1 = nterm exp (-1)
+   $2 = token '-' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (-4)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
+Next token is token number (4)
+Shifting token number (4)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (4)
+-> $$ = nterm exp (4)
+Entering state 10
 Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 29
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (4)
+-> $$ = nterm exp (-4)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (-4)
+   $2 = token '=' ()
+   $3 = nterm exp (-4)
+-> $$ = nterm exp (-4)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (-4)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 20
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 12
 Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 21
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
 Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 30
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 28
 Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 104):
+   $1 = nterm exp (2)
+   $2 = token '-' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (-1)
 Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 21
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 29
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 117):
+   $1 = token '(' ()
+   $2 = nterm exp (-1)
+   $3 = token ')' ()
+-> $$ = nterm exp (-1)
+Entering state 28
 Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
+Next token is token '=' ()
+Reducing stack 0 by rule 8 (line 104):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (2)
 Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '=' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
 Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-574. calc.at:1512:  skipped (calc.at:1512)
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-
-input:
-  | (!!) + (1 2) = 1
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 8
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
-Entering state 12
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 32
 Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
-Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token ')' (1.12: )
-Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 29
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 32
 Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (8)
+Entering state 32
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (8)
+-> $$ = nterm exp (256)
 Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
+Next token is token number (256)
+Shifting token number (256)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (256)
+-> $$ = nterm exp (256)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (256)
+   $2 = token '=' ()
+   $3 = nterm exp (256)
+-> $$ = nterm exp (256)
 Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (256)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 20
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 12
 Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 32
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (4)
 Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 117):
+   $1 = token '(' ()
+   $2 = nterm exp (4)
+   $3 = token ')' ()
+-> $$ = nterm exp (4)
+Entering state 8
 Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
-Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token ')' (1.12: )
-Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 29
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 32
 Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (4)
+   $2 = token '^' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (64)
 Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
+Next token is token number (64)
+Shifting token number (64)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (64)
+-> $$ = nterm exp (64)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (64)
+   $2 = token '=' ()
+   $3 = nterm exp (64)
+-> $$ = nterm exp (64)
 Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (64)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
 ./calc.at:1491: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
+Next token is token '+' ()
+Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
-Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 29
 Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 9 (line 105):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
+Next token is token number (7)
+Shifting token number (7)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (7)
+-> $$ = nterm exp (7)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (7)
+   $2 = token '=' ()
+   $3 = nterm exp (7)
+-> $$ = nterm exp (7)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (7)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
 Entering state 2
 Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 10
 Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (3)
+-> $$ = nterm exp (-3)
+Entering state 30
+Next token is token '=' ()
+Reducing stack 0 by rule 9 (line 105):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (-3)
+-> $$ = nterm exp (-6)
+Entering state 29
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (-6)
+-> $$ = nterm exp (-5)
 Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
+Next token is token number (5)
+Shifting token number (5)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (5)
+-> $$ = nterm exp (5)
+Entering state 10
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (5)
+-> $$ = nterm exp (-5)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (-5)
+   $2 = token '=' ()
+   $3 = nterm exp (-5)
+-> $$ = nterm exp (-5)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (-5)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
+Reading a token
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 32
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (1)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (-1)
+   $2 = token '=' ()
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (-1)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (-1)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
 Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 117):
+   $1 = token '(' ()
+   $2 = nterm exp (-1)
+   $3 = token ')' ()
+-> $$ = nterm exp (-1)
+Entering state 8
 Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 29
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 32
 Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (-1)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (1)
 Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (1)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (1)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (1)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-575. calc.at:1514: testing Calculator D %debug  ...
-./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (-1)
+-> $$ = nterm exp (1)
+Entering state 10
+Next token is token '=' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
 Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 10
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (1)
+-> $$ = nterm exp (-1)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (-1)
+   $2 = token '=' ()
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (-1)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (-1)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 28
+Reading a token
+Next token is token '-' ()
+Reducing stack 0 by rule 8 (line 104):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (-1)
+Entering state 8
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 28
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 8 (line 104):
+   $1 = nterm exp (-1)
+   $2 = token '-' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (-4)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token number (4)
+Shifting token number (4)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (4)
+-> $$ = nterm exp (4)
+Entering state 10
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 11 (line 115):
+   $1 = token '-' ()
+   $2 = nterm exp (4)
+-> $$ = nterm exp (-4)
+Entering state 27
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (-4)
+   $2 = token '=' ()
+   $3 = nterm exp (-4)
+-> $$ = nterm exp (-4)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (-4)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 12
 Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 29
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
 Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 28
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 104):
+   $1 = nterm exp (2)
+   $2 = token '-' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (-1)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 117):
+   $1 = token '(' ()
+   $2 = nterm exp (-1)
+   $3 = token ')' ()
+-> $$ = nterm exp (-1)
+Entering state 28
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 8 (line 104):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (-1)
+-> $$ = nterm exp (2)
 Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '=' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 8
+Reading a token
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 32
+Reading a token
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 32
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (8)
+Entering state 32
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (8)
+-> $$ = nterm exp (256)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (256)
+Shifting token number (256)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (256)
+-> $$ = nterm exp (256)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (256)
+   $2 = token '=' ()
+   $3 = nterm exp (256)
+-> $$ = nterm exp (256)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (256)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 12
 Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 29
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 32
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (2)
+   $2 = token '^' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (4)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 117):
+   $1 = token '(' ()
+   $2 = nterm exp (4)
+   $3 = token ')' ()
+-> $$ = nterm exp (4)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Reading a token
+Next token is token '^' ()
+Shifting token '^' ()
+Entering state 23
+Reading a token
+Next token is token number (3)
+Shifting token number (3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 32
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 12 (line 116):
+   $1 = nterm exp (4)
+   $2 = token '^' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (64)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (64)
+Shifting token number (64)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (64)
+-> $$ = nterm exp (64)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (64)
+   $2 = token '=' ()
+   $3 = nterm exp (64)
+-> $$ = nterm exp (64)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (64)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 17
+Reducing stack 0 by rule 2 (line 83):
+   $1 = nterm input ()
+   $2 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+input:
 stderr:
+input:
+  | 1 2
+./calc.at:1487:  $PREPARSER ./calc  input
+  | error
+./calc.at:1491:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token ')' (1.5: )
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
 Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
+Next token is token error ()
+Error: discarding token error ()
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
+Next token is token '+' ()
+Error: discarding token '+' ()
 Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
+Next token is token number (1)
+Error: discarding token number (1)
 Reading a token
-Next token is token ')' (1.11: )
+Next token is token ')' ()
 Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token ')' ()
+Shifting token ')' ()
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
 Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 29
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
+Shifting token end of input ()
 Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token number (2)
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token number (2)
+stderr:
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token number (2)
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n'])
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token number (2)
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1486: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+input:
+./calc.at:1487: cat stderr
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -212813,149 +208583,292 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-575. calc.at:1514:  skipped (calc.at:1514)
+  | (1 + 1) / (1 - 1)
+./calc.at:1486:  $PREPARSER ./calc  input
 ./calc.at:1491: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1491:  $PREPARSER ./calc  input
-
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' ()
+Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 29
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
+Next token is token ')' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 28
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1//2
+./calc.at:1487:  $PREPARSER ./calc  input
+stderr:
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token '/' ()
+syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 29
-Next token is token '+' (1.11: )
+Reading a token
+Next token is token ')' ()
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
 Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | 1 + 2 * 3 + !- ++
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | 1 = 2 = 3
 ./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '/' ()
+syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!'])
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
 Starting parse
 Entering state 0
 Reading a token
@@ -212967,9 +208880,9 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 18
 Reading a token
 Next token is token "number" (1.5: 2)
 Shifting token "number" (1.5: 2)
@@ -212977,52 +208890,26 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Entering state 27
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1486: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -213034,9 +208921,9 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 18
 Reading a token
 Next token is token "number" (1.5: 2)
 Shifting token "number" (1.5: 2)
@@ -213044,52 +208931,16 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Entering state 27
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-576. calc.at:1516: testing Calculator D parse.error=custom  ...
-./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+./calc.at:1487: cat stderr
+./calc.at:1486: cat stderr
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -213100,96 +208951,40 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1491: cat stderr
 input:
-  | (#) + (#) = 2222
-./calc.at:1491:  $PREPARSER ./calc  input
+  | error
+./calc.at:1487:  $PREPARSER ./calc  input
+561. calc.at:1486:  ok
+./calc.at:1491: cat stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
+Next token is token invalid token ()
+syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+Cleanup: discarding lookahead token invalid token ()
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | 
+  | +1
+./calc.at:1491:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
+Next token is token invalid token ()
+syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+Cleanup: discarding lookahead token invalid token ()
+
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' (1.1-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -213197,98 +208992,21 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1487: cat stderr
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' (1.1-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -213296,11 +209014,10 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -213311,169 +209028,92 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | 1 = 2 = 3
+./calc.at:1487:  $PREPARSER ./calc  input
 ./calc.at:1491: cat stderr
-576. calc.at:1516: input:
- skipped (calc.at:1516)
-  | (1 + #) = 1111
-./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
-
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Next token is token '=' ()
+syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+./calc.at:1491:  $PREPARSER ./calc  /dev/null
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs}  ...
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 18
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '=' ()
+syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'])
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
+stderr:
+./calc.at:1494: mv calc.y.tmp calc.y
+
+Starting parse
+Entering state 0
 Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1487: cat stderr
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -213484,11 +209124,58 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input:
+  | 
+  | +1
+./calc.at:1487:  $PREPARSER ./calc  input
 ./calc.at:1491: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 87):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!'])
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
 input:
-  | (# + 1) = 1111
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
+stderr:
+stdout:
 Starting parse
 Entering state 0
 Reading a token
@@ -213496,127 +209183,234 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 Shifting token error (1.2: )
 Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
 Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 20
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 29
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 29
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 20
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
 Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
 Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
 Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.28: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
 Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 20
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 21
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.47-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -213629,8 +209423,6 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-stdout:
 ./calc.at:1489: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -213642,38 +209434,11 @@
         || /\t/
         )' calc.cc
 
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-577. calc.at:1517: testing Calculator D %locations parse.error=custom  ...
-./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-./calc.at:1491: cat stderr
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1489:  $PREPARSER ./calc  input
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1487: cat stderr
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1487:  $PREPARSER ./calc  /dev/null
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -213681,70 +209446,234 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
 Entering state 20
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 29
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 20
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
 Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 29
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 20
 Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
 Reading a token
-Next token is token ')' (1.11: )
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
 Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
 Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 20
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 21
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (1.47-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
 Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -213758,6 +209687,45 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+Cleanup: discarding lookahead token end of file ()
+./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!'])
+Cleanup: discarding lookahead token end of file ()
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -214594,9 +210562,8 @@
 Entering state 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1491: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -215434,93 +211401,11 @@
 Entering state 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
 input:
   | 1 2
 ./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1487: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -215536,39 +211421,16 @@
 syntax error, unexpected number
 Error: popping nterm exp (1)
 Cleanup: discarding lookahead token number (2)
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1491: cat stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token number (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token number (2)
 input:
-  | (1 + 1) / (1 - 1)
+  | (!!) + (1 2) = 1
 ./calc.at:1491:  $PREPARSER ./calc  input
-577. calc.at:1517: stderr:
- skipped (calc.at:1517)
+input:
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1487:  $PREPARSER ./calc  input
+stderr:
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -215576,190 +211438,75 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
 Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
+Entering state 11
 Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-stderr:
-Starting parse
-Entering state 0
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 20
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
 Entering state 12
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
+Entering state 11
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
 Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
+Next token is token ')' (1.12: )
+Entering state 11
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
 Entering state 29
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token '=' (1.14: )
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
 Entering state 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 18
 Reading a token
 Next token is token "number" (1.16: 1)
 Shifting token "number" (1.16: 1)
@@ -215767,39 +211514,22 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1.16: 1)
 -> $$ = nterm exp (1.16: 1)
-Entering state 28
+Entering state 27
 Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
    $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -215812,141 +211542,7 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1489: cat stderr
-input:
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-  | 1//2
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1491: cat stderr
-stderr:
-565. calc.at:1491: Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '/' ()
-Shifting token '/' ()
-Entering state 22
-Reading a token
-Next token is token '/' ()
-syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '/' ()
- ok
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-input:
-  | error
-./calc.at:1489:  $PREPARSER ./calc  input
-
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token invalid token ()
-syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token ()
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1)
-Shifting token number (1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
-Entering state 8
-Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
-Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 27
-Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
-Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -215958,129 +211554,10 @@
 -> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
-Entering state 18
-Reading a token
 Next token is token number (2)
-Shifting token number (2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (2)
--> $$ = nterm exp (2)
-Entering state 27
-Reading a token
-Next token is token '=' ()
-syntax error, unexpected '='
-Error: popping nterm exp (2)
-Error: popping token '=' ()
+syntax error, unexpected number
 Error: popping nterm exp (1)
-Cleanup: discarding lookahead token '=' ()
-578. calc.at:1518: testing Calculator D %locations parse.error=detailed  ...
-./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-input:
-  | 
-  | +1
-./calc.at:1489:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' ()
-Shifting token '\n' ()
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' ()
--> $$ = nterm line ()
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
-Entering state 6
-Reading a token
-Next token is token '+' ()
-syntax error, unexpected '+'
-Error: popping nterm input ()
-Cleanup: discarding lookahead token '+' ()
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-579. calc.at:1519: testing Calculator D %locations parse.error=simple  ...
-./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-./calc.at:1489: cat stderr
-./calc.at:1489:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-syntax error, unexpected end of input
-Cleanup: discarding lookahead token end of input ()
-578. calc.at:1518:  skipped (calc.at:1518)
-
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-579. calc.at:1519: ./calc.at:1489: cat stderr
- skipped (calc.at:1519)
-
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1489:  $PREPARSER ./calc  input
+Cleanup: discarding lookahead token number (2)
 stderr:
 Starting parse
 Entering state 0
@@ -216090,13 +211567,13 @@
 Entering state 4
 Reading a token
 Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 Shifting token error ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216114,7 +211591,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -216126,13 +211603,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -216145,13 +211622,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -216162,7 +211639,7 @@
 Entering state 20
 Reading a token
 Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 Error: popping token '+' ()
 Error: popping nterm exp (3)
 Shifting token error ()
@@ -216170,7 +211647,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216178,7 +211655,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -216193,7 +211670,7 @@
 Entering state 4
 Reading a token
 Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 Shifting token error ()
 Entering state 11
 Next token is token '*' ()
@@ -216210,7 +211687,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216218,7 +211695,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -216235,7 +211712,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -216247,13 +211724,13 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 30
 Reading a token
 Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (1)
    $2 = token '*' ()
    $3 = nterm exp (2)
@@ -216264,7 +211741,7 @@
 Entering state 21
 Reading a token
 Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 Error: popping token '*' ()
 Error: popping nterm exp (2)
 Shifting token error ()
@@ -216277,7 +211754,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216285,7 +211762,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (3333)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -216298,13 +211775,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (4444)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -216314,23 +211791,145 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Reading a token
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
+Entering state 11
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
+Entering state 11
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
+Reading a token
+Next token is token ')' (1.12: )
+Entering state 11
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -216339,13 +211938,13 @@
 Entering state 4
 Reading a token
 Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 Shifting token error ()
 Entering state 11
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216363,7 +211962,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -216375,13 +211974,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -216394,13 +211993,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -216411,7 +212010,7 @@
 Entering state 20
 Reading a token
 Next token is token ')' ()
-syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+syntax error on token [')'] (expected: [number] ['-'] ['('] ['!'])
 Error: popping token '+' ()
 Error: popping nterm exp (3)
 Shifting token error ()
@@ -216419,7 +212018,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216427,7 +212026,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -216442,7 +212041,7 @@
 Entering state 4
 Reading a token
 Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 Shifting token error ()
 Entering state 11
 Next token is token '*' ()
@@ -216459,7 +212058,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216467,7 +212066,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -216484,7 +212083,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -216496,13 +212095,13 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 30
 Reading a token
 Next token is token '*' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (1)
    $2 = token '*' ()
    $3 = nterm exp (2)
@@ -216513,7 +212112,7 @@
 Entering state 21
 Reading a token
 Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 Error: popping token '*' ()
 Error: popping nterm exp (2)
 Shifting token error ()
@@ -216526,7 +212125,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216534,7 +212133,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (3333)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -216547,13 +212146,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (4444)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -216563,25 +212162,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose  ...
-./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose  ...
-./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -216592,252 +212187,253 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1489: cat stderr
+./calc.at:1491: cat stderr
+./calc.at:1487: cat stderr
 input:
-  | (!!) + (1 2) = 1
-./calc.at:1489:  $PREPARSER ./calc  input
+  | (- *) + (1 2) = 1
+./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1489: cat stderr
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' ()
-   $2 = token '!' ()
-Shifting token error ()
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
 Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 20
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
 Entering state 12
 Reading a token
-Next token is token number (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Shifting token error ()
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
 Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
 Reading a token
-Next token is token ')' ()
+Next token is token ')' (1.13: )
 Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
 Entering state 29
 Reading a token
-Next token is token '=' ()
+Next token is token '=' (1.15: )
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
 Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' ()
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token "end of input" (2.1: )
 Entering state 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+input:
+stderr:
+  | (!!) + (1 2) = 1
+./calc.at:1487:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 5
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
 Reading a token
-Next token is token '!' ()
-Shifting token '!' ()
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' ()
-   $2 = token '!' ()
-Shifting token error ()
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
 Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' ()
-Shifting token ')' ()
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 20
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
 Entering state 12
 Reading a token
-Next token is token number (2)
-syntax error, unexpected number
-Error: popping nterm exp (1)
-Shifting token error ()
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
 Entering state 11
-Next token is token number (2)
-Error: discarding token number (2)
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
 Reading a token
-Next token is token ')' ()
+Next token is token ')' (1.13: )
 Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
 Entering state 29
 Reading a token
-Next token is token '=' ()
+Next token is token '=' (1.15: )
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1111)
-   $2 = token '+' ()
-   $3 = nterm exp (1111)
--> $$ = nterm exp (2222)
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
 Entering state 8
-Next token is token '=' ()
-Shifting token '=' ()
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 18
 Reading a token
-Next token is token number (1)
-Shifting token number (1)
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1)
--> $$ = nterm exp (1)
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' ()
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2222)
-   $2 = token '=' ()
-   $3 = nterm exp (1)
-error: 2222 != 1
--> $$ = nterm exp (2222)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' ()
-Shifting token '\n' ()
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2222)
-   $2 = token '\n' ()
--> $$ = nterm line ()
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line ()
--> $$ = nterm input ()
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token "end of input" (2.1: )
 Entering state 16
-Cleanup: popping token end of input ()
-Cleanup: popping nterm input ()
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1489:  $PREPARSER ./calc  input
-580. calc.at:1520:  skipped (calc.at:1520)
-581. calc.at:1521:  skipped (calc.at:1521)
-stderr:
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+  | 1//2
 Starting parse
 Entering state 0
 Reading a token
@@ -216845,28 +212441,23 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' ()
-   $2 = token error ()
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 120):
+   $1 = token '!' ()
+   $2 = token '!' ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216884,13 +212475,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
 Next token is token number (2)
-syntax error, unexpected number
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
 Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
@@ -216902,7 +212493,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -216910,7 +212501,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -216923,13 +212514,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -216939,23 +212530,56 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
+stderr:
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -216964,28 +212588,23 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
-Next token is token '-' ()
-Shifting token '-' ()
-Entering state 2
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' ()
-   $2 = token error ()
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 120):
+   $1 = token '!' ()
+   $2 = token '!' ()
 Shifting token error ()
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
-Entering state 11
-Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217003,13 +212622,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
 Next token is token number (2)
-syntax error, unexpected number
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
 Error: popping nterm exp (1)
 Shifting token error ()
 Entering state 11
@@ -217021,7 +212640,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217029,7 +212648,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -217042,13 +212661,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (1)
@@ -217058,22 +212677,43 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '/' ()
+syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '/' ()
+./calc.at:1491: cat stderr
+input:
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -217084,55 +212724,294 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1489: cat stderr
-input:
   | (* *) + (*) + (*)
-./calc.at:1489:  $PREPARSER ./calc  input
-582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed  ...
+./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
-./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
 Entering state 11
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
 Reading a token
-Next token is token '*' ()
-Error: discarding token '*' ()
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' ()
+Next token is token ')' (1.5: )
 Entering state 11
-Next token is token ')' ()
-Shifting token ')' ()
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' ()
-   $2 = token error ()
-   $3 = token ')' ()
--> $$ = nterm exp (1111)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 20
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Reading a token
-Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error ()
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
 Entering state 11
-Next token is token '*' ()
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Reading a token
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 29
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1487: cat stderr
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1489: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Reading a token
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 29
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+input:
+  | error
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+  | (- *) + (1 2) = 1
+./calc.at:1487:  $PREPARSER ./calc  input
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1491: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token '*' ()
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 119):
+   $1 = token '-' ()
+   $2 = token error ()
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
 Error: discarding token '*' ()
 Reading a token
 Next token is token ')' ()
@@ -217140,30 +213019,186 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
-Entering state 29
+Entering state 8
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token number (2)
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
 -> $$ = nterm exp (2222)
 Entering state 8
-Next token is token '+' ()
-Shifting token '+' ()
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
 Entering state 20
 Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+Starting parse
+Entering state 0
+Reading a token
+Next token is token invalid token ()
+syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token ()
+Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
 Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
 Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 119):
+   $1 = token '-' ()
+   $2 = token error ()
 Shifting token error ()
 Entering state 11
 Next token is token '*' ()
@@ -217174,40 +213209,388 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token number (2)
+syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')'])
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
 -> $$ = nterm exp (1111)
 Entering state 29
 Reading a token
-Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2222)
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
--> $$ = nterm exp (3333)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 93):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
 Entering state 8
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (3333)
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1487: cat stderr
+./calc.at:1489: cat stderr
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+input:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+  | 1 = 2 = 3
+./calc.at:1489:  $PREPARSER ./calc  input
+input:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (* *) + (*) + (*)
+./calc.at:1487:  $PREPARSER ./calc  input
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 stderr:
+./calc.at:1491: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 27
+Reading a token
+Next token is token '=' ()
+syntax error, unexpected '='
+Error: popping nterm exp (2)
+Error: popping token '=' ()
+Error: popping nterm exp (1)
+Cleanup: discarding lookahead token '=' ()
 Starting parse
 Entering state 0
 Reading a token
@@ -217216,7 +213599,7 @@
 Entering state 4
 Reading a token
 Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 Shifting token error ()
 Entering state 11
 Next token is token '*' ()
@@ -217230,7 +213613,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217246,7 +213629,7 @@
 Entering state 4
 Reading a token
 Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 Shifting token error ()
 Entering state 11
 Next token is token '*' ()
@@ -217257,7 +213640,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217265,7 +213648,7 @@
 Entering state 29
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -217280,7 +213663,7 @@
 Entering state 4
 Reading a token
 Next token is token '*' ()
-syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
 Shifting token error ()
 Entering state 11
 Next token is token '*' ()
@@ -217291,7 +213674,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217299,7 +213682,7 @@
 Entering state 29
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (2222)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -217308,23 +213691,26 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (3333)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom  ...
-./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (#) + (#) = 2222
+stderr:
+./calc.at:1491:  $PREPARSER ./calc  input
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -217335,10 +213721,323 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 118):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 7 (line 103):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (3333)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1489: cat stderr
+stderr:
+./calc.at:1487: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 input:
   | 1 + 2 * 3 + !+ ++
-./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1487:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -217346,7 +214045,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
@@ -217358,7 +214057,7 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
@@ -217370,20 +214069,20 @@
 Next token is token number (3)
 Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -217400,21 +214099,55 @@
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 14
-Reducing stack 0 by rule 17 (line 108):
+Reducing stack 0 by rule 17 (line 121):
    $1 = token '!' ()
    $2 = token '+' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 
+  | +1
+./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-582. calc.at:1523: stderr:
- skipped (calc.at:1523)
 Starting parse
 Entering state 0
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
@@ -217426,7 +214159,7 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
@@ -217438,20 +214171,20 @@
 Next token is token number (3)
 Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -217468,23 +214201,44 @@
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 14
-Reducing stack 0 by rule 17 (line 108):
+Reducing stack 0 by rule 17 (line 121):
    $1 = token '!' ()
    $2 = token '+' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+stderr:
 input:
-
+./calc.at:1491: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Next token is token '+' ()
+syntax error, unexpected '+'
+Error: popping nterm input ()
+Cleanup: discarding lookahead token '+' ()
   | 1 + 2 * 3 + !- ++
-./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1487:  $PREPARSER ./calc  input
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
@@ -217496,7 +214250,7 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
@@ -217508,20 +214262,20 @@
 Next token is token number (3)
 Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -217538,12 +214292,15 @@
 Next token is token '-' ()
 Shifting token '-' ()
 Entering state 13
-Reducing stack 0 by rule 18 (line 109):
+Reducing stack 0 by rule 18 (line 122):
    $1 = token '!' ()
    $2 = token '-' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + #) = 1111
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -217551,7 +214308,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 8
@@ -217563,7 +214320,7 @@
 Next token is token number (2)
 Shifting token number (2)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2)
 -> $$ = nterm exp (2)
 Entering state 29
@@ -217575,20 +214332,20 @@
 Next token is token number (3)
 Shifting token number (3)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (3)
 -> $$ = nterm exp (3)
 Entering state 30
 Reading a token
 Next token is token '+' ()
-Reducing stack 0 by rule 9 (line 92):
+Reducing stack 0 by rule 9 (line 105):
    $1 = nterm exp (2)
    $2 = token '*' ()
    $3 = nterm exp (3)
 -> $$ = nterm exp (6)
 Entering state 29
 Next token is token '+' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (6)
@@ -217605,11 +214362,88 @@
 Next token is token '-' ()
 Shifting token '-' ()
 Entering state 13
-Reducing stack 0 by rule 18 (line 109):
+Reducing stack 0 by rule 18 (line 122):
    $1 = token '!' ()
    $2 = token '-' ()
 Cleanup: popping token '+' ()
 Cleanup: popping nterm exp (7)
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -217620,14 +214454,110 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-583. calc.at:1524: ./calc.at:1489: cat stderr
- skipped (calc.at:1524)
-
-584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace  ...
-./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1489: cat stderr
+./calc.at:1487: cat stderr
+./calc.at:1489:  $PREPARSER ./calc  /dev/null
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 input:
+./calc.at:1491: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | (#) + (#) = 2222
-./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1487:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -217648,7 +214578,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217675,7 +214605,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217683,7 +214613,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -217696,13 +214626,13 @@
 Next token is token number (2222)
 Shifting token number (2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2222)
 -> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (2222)
@@ -217711,22 +214641,23 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -217747,7 +214678,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217774,7 +214705,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217782,7 +214713,7 @@
 Entering state 29
 Reading a token
 Next token is token '=' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1111)
    $2 = token '+' ()
    $3 = nterm exp (1111)
@@ -217795,13 +214726,13 @@
 Next token is token number (2222)
 Shifting token number (2222)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (2222)
 -> $$ = nterm exp (2222)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (2222)
    $2 = token '=' ()
    $3 = nterm exp (2222)
@@ -217810,21 +214741,101 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+syntax error, unexpected end of input
+Cleanup: discarding lookahead token end of input ()
+input:
+  | (# + 1) = 1111
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -217835,11 +214846,83 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1489: cat stderr
+./calc.at:1487: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 input:
   | (1 + #) = 1111
-./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1487:  $PREPARSER ./calc  input
+./calc.at:1489: cat stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -217850,7 +214933,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -217873,7 +214956,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217887,13 +214970,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -217902,8 +214985,271 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1491: cat stderr
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (3)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 30
+Reading a token
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1)
+   $2 = token '*' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (3333)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4444)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -217918,7 +215264,6 @@
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
 ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -217929,7 +215274,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -217952,7 +215297,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -217966,13 +215311,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -217981,8 +215326,259 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (3)
+Entering state 12
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token ')' ()
+syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' ()
+Error: popping nterm exp (3)
+Shifting token error ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
+Entering state 8
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token number (2)
+Shifting token number (2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 30
+Reading a token
+Next token is token '*' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1)
+   $2 = token '*' ()
+   $3 = nterm exp (2)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' ()
+Error: popping nterm exp (2)
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (3333)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4444)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 4444 != 1
+-> $$ = nterm exp (4444)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4444)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -217996,9 +215592,176 @@
 Entering state 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-584. calc.at:1525: 585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union  ...
-./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
- skipped (calc.at:1525)
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1487: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -218009,11 +215772,20 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1489: cat stderr
-
 input:
   | (# + 1) = 1111
-./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1487:  $PREPARSER ./calc  input
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1489: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -218040,7 +215812,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -218054,13 +215826,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -218069,23 +215841,24 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1491: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -218111,7 +215884,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -218125,13 +215898,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -218140,8 +215913,357 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
+Reducing stack 0 by rule 4 (line 88):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 82):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of file ()
+Entering state 16
+Cleanup: popping token end of file ()
+Cleanup: popping nterm input ()
+input:
+  | (!!) + (1 2) = 1
+./calc.at:1489:  $PREPARSER ./calc  input
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
+stderr:
+./calc.at:1487: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' ()
+   $2 = token '!' ()
+Shifting token error ()
+Entering state 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
 Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' ()
+   $2 = token '!' ()
+Shifting token error ()
+Entering state 11
+Reading a token
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
    $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
@@ -218155,26 +216277,129 @@
 Entering state 16
 Cleanup: popping token end of input ()
 Cleanup: popping nterm input ()
-585. calc.at:1530:  skipped (calc.at:1530)
-./calc.at:1489: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1489: cat stderr
-
 input:
   | (1 + # + 1) = 1111
-./calc.at:1489:  $PREPARSER ./calc  input
+./calc.at:1487:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
 Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' ()
 Shifting token '(' ()
 Entering state 4
@@ -218182,7 +216407,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -218211,7 +216436,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -218225,13 +216450,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -218240,23 +216465,33 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -218267,7 +216502,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -218296,7 +216531,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Reducing stack 0 by rule 14 (line 118):
    $1 = token '(' ()
    $2 = token error ()
    $3 = token ')' ()
@@ -218310,13 +216545,13 @@
 Next token is token number (1111)
 Shifting token number (1111)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1111)
 -> $$ = nterm exp (1111)
 Entering state 27
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 6 (line 80):
+Reducing stack 0 by rule 6 (line 93):
    $1 = nterm exp (1111)
    $2 = token '=' ()
    $3 = nterm exp (1111)
@@ -218325,24 +216560,22 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (1111)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-586. calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed  ...
-./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-./calc.at:1489: "$PERL" -pi -e 'use strict;
+./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -218352,13 +216585,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-587. calc.at:1532: testing Calculator D api.push-pull=both  ...
-./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
 ./calc.at:1489: cat stderr
 input:
-  | (1 + 1) / (1 - 1)
+./calc.at:1491: cat stderr
+  | (- *) + (1 2) = 1
 ./calc.at:1489:  $PREPARSER ./calc  input
-587. calc.at:1532: stderr:
+./calc.at:1487: cat stderr
+stderr:
+565. calc.at:1491:  ok
 Starting parse
 Entering state 0
 Reading a token
@@ -218366,6 +216600,42 @@
 Shifting token '(' ()
 Entering state 4
 Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' ()
+   $2 = token error ()
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
@@ -218374,20 +216644,222 @@
 -> $$ = nterm exp (1)
 Entering state 12
 Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1487:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 2
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' ()
+   $2 = token error ()
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
 Next token is token '+' ()
 Shifting token '+' ()
 Entering state 20
 Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
    $1 = token number (1)
 -> $$ = nterm exp (1)
-Entering state 29
+Entering state 12
+Reading a token
+Next token is token number (2)
+syntax error, unexpected number
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token number (2)
+Error: discarding token number (2)
 Reading a token
 Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
 Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (1)
+error: 2222 != 1
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 92):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -218396,7 +216868,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (2)
    $3 = token ')' ()
@@ -218414,7 +216886,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -218426,13 +216898,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 28
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (1)
@@ -218441,7 +216913,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (0)
    $3 = token ')' ()
@@ -218449,7 +216921,7 @@
 Entering state 31
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
+Reducing stack 0 by rule 10 (line 106):
    $1 = nterm exp (2)
    $2 = token '/' ()
    $3 = nterm exp (0)
@@ -218459,23 +216931,23 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
- skipped (calc.at:1532)
+./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
 stderr:
 Starting parse
 Entering state 0
@@ -218487,7 +216959,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -218499,13 +216971,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 29
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 7 (line 90):
+Reducing stack 0 by rule 7 (line 103):
    $1 = nterm exp (1)
    $2 = token '+' ()
    $3 = nterm exp (1)
@@ -218514,7 +216986,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (2)
    $3 = token ')' ()
@@ -218532,7 +217004,7 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 12
@@ -218544,13 +217016,13 @@
 Next token is token number (1)
 Shifting token number (1)
 Entering state 1
-Reducing stack 0 by rule 5 (line 79):
+Reducing stack 0 by rule 5 (line 92):
    $1 = token number (1)
 -> $$ = nterm exp (1)
 Entering state 28
 Reading a token
 Next token is token ')' ()
-Reducing stack 0 by rule 8 (line 91):
+Reducing stack 0 by rule 8 (line 104):
    $1 = nterm exp (1)
    $2 = token '-' ()
    $3 = nterm exp (1)
@@ -218559,7 +217031,7 @@
 Next token is token ')' ()
 Shifting token ')' ()
 Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Reducing stack 0 by rule 13 (line 117):
    $1 = token '(' ()
    $2 = nterm exp (0)
    $3 = token ')' ()
@@ -218567,7 +217039,7 @@
 Entering state 31
 Reading a token
 Next token is token '\n' ()
-Reducing stack 0 by rule 10 (line 93):
+Reducing stack 0 by rule 10 (line 106):
    $1 = nterm exp (2)
    $2 = token '/' ()
    $3 = nterm exp (0)
@@ -218577,23 +217049,21 @@
 Next token is token '\n' ()
 Shifting token '\n' ()
 Entering state 24
-Reducing stack 0 by rule 4 (line 75):
+Reducing stack 0 by rule 4 (line 88):
    $1 = nterm exp (2)
    $2 = token '\n' ()
 -> $$ = nterm line ()
 Entering state 7
-Reducing stack 0 by rule 1 (line 69):
+Reducing stack 0 by rule 1 (line 82):
    $1 = nterm line ()
 -> $$ = nterm input ()
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input ()
+Shifting token end of file ()
 Entering state 16
-Cleanup: popping token end of input ()
+Cleanup: popping token end of file ()
 Cleanup: popping nterm input ()
-586. calc.at:1531: 
- skipped (calc.at:1531)
 ./calc.at:1489: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -218604,609 +217074,2130 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-
 ./calc.at:1489: cat stderr
-564. calc.at:1489:  ok
-
-588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full  ...
-./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
-589. calc.at:1544: testing Calculator Java   ...
-./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-588. calc.at:1533:  skipped (calc.at:1533)
-
-590. calc.at:1545: testing Calculator Java parse.error=custom  ...
-./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-589. calc.at:1544:  skipped (calc.at:1544)
-
-592. calc.at:1547: testing Calculator Java parse.error=verbose  ...
-./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-591. calc.at:1546: testing Calculator Java parse.error=detailed  ...
-./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-590. calc.at:1545:  skipped (calc.at:1545)
-
-591. calc.at:1546: 592. calc.at:1547:  skipped (calc.at:1547)
- skipped (calc.at:1546)
-
-
-593. calc.at:1548: testing Calculator Java %locations parse.error=custom  ...
-./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-593. calc.at:1548:  skipped (calc.at:1548)
-
-594. calc.at:1549: testing Calculator Java %locations parse.error=detailed  ...
-595. calc.at:1550: testing Calculator Java %locations parse.error=verbose  ...
-./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-594. calc.at:1549: 595. calc.at:1550:  skipped (calc.at:1550)
- skipped (calc.at:1549)
-596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose  ...
-./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-
-
-597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is}  ...
-596. calc.at:1551:  skipped (calc.at:1551)
-./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-
-598. calc.at:1554: testing Calculator Java api.push-pull=both  ...
-./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-597. calc.at:1552:  skipped (calc.at:1552)
-
-598. calc.at:1554:  skipped (calc.at:1554)
-599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations  ...
-./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-
-600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both  ...
-./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-599. calc.at:1555:  skipped (calc.at:1555)
-601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both  ...
-./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
-
-600. calc.at:1556:  skipped (calc.at:1556)
-
-stderr:
-stdout:
-./calc.at:1492: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
+./calc.at:1487: cat stderr
+562. calc.at:1487:  ok
 input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1492:  $PREPARSER ./calc  input
+  | (* *) + (*) + (*)
+./calc.at:1489:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' ()
+Shifting token '+' ()
 Entering state 20
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 29
-Next token is token '=' (1.11: )
+Reading a token
+Next token is token '+' ()
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token "number" (1.13: 7)
-Shifting token "number" (1.13: 7)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.13: 7)
--> $$ = nterm exp (1.13: 7)
-Entering state 27
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token '\n' (1.14-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 7)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13: 7)
--> $$ = nterm exp (1.1-13: 7)
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
 Entering state 8
-Next token is token '\n' (1.14-2.0: )
-Shifting token '\n' (1.14-2.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-13: 7)
-   $2 = token '\n' (1.14-2.0: )
--> $$ = nterm line (1.1-2.0: )
+   $1 = nterm exp (3333)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token "number" (2.1: 1)
-Shifting token "number" (2.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2.1: 1)
--> $$ = nterm exp (2.1: 1)
-Entering state 8
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '+' (2.3: )
-Shifting token '+' (2.3: )
-Entering state 20
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token "number" (2.5: 2)
-Shifting token "number" (2.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2.5: 2)
--> $$ = nterm exp (2.5: 2)
-Entering state 29
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token '*' (2.7: )
-Shifting token '*' (2.7: )
-Entering state 21
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token '-' (2.9: )
-Shifting token '-' (2.9: )
-Entering state 2
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
 Reading a token
-Next token is token "number" (2.10: 3)
-Shifting token "number" (2.10: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2.10: 3)
--> $$ = nterm exp (2.10: 3)
-Entering state 10
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (2.9: )
-   $2 = nterm exp (2.10: 3)
--> $$ = nterm exp (2.9-10: -3)
-Entering state 30
-Next token is token '=' (2.12: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (2.5: 2)
-   $2 = token '*' (2.7: )
-   $3 = nterm exp (2.9-10: -3)
--> $$ = nterm exp (2.5-10: -6)
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
 Entering state 29
-Next token is token '=' (2.12: )
+Reading a token
+Next token is token '+' ()
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (2.1: 1)
-   $2 = token '+' (2.3: )
-   $3 = nterm exp (2.5-10: -6)
--> $$ = nterm exp (2.1-10: -5)
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
 Entering state 8
-Next token is token '=' (2.12: )
-Shifting token '=' (2.12: )
-Entering state 18
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '-' (2.14: )
-Shifting token '-' (2.14: )
-Entering state 2
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
 Reading a token
-Next token is token "number" (2.15: 5)
-Shifting token "number" (2.15: 5)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (2.15: 5)
--> $$ = nterm exp (2.15: 5)
-Entering state 10
+Next token is token '*' ()
+syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error ()
+Entering state 11
+Next token is token '*' ()
+Error: discarding token '*' ()
 Reading a token
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (2.14: )
-   $2 = nterm exp (2.15: 5)
--> $$ = nterm exp (2.14-15: -5)
-Entering state 27
-Next token is token '\n' (2.16-3.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (2.1-10: -5)
-   $2 = token '=' (2.12: )
-   $3 = nterm exp (2.14-15: -5)
--> $$ = nterm exp (2.1-15: -5)
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2222)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (3333)
 Entering state 8
-Next token is token '\n' (2.16-3.0: )
-Shifting token '\n' (2.16-3.0: )
+Next token is token '\n' ()
+Shifting token '\n' ()
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (2.1-15: -5)
-   $2 = token '\n' (2.16-3.0: )
--> $$ = nterm line (2.1-3.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-2.0: )
-   $2 = nterm line (2.1-3.0: )
--> $$ = nterm input (1.1-3.0: )
-Entering state 6
-Reading a token
-Next token is token '\n' (3.1-4.0: )
-Shifting token '\n' (3.1-4.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' (3.1-4.0: )
--> $$ = nterm line (3.1-4.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-3.0: )
-   $2 = nterm line (3.1-4.0: )
--> $$ = nterm input (1.1-4.0: )
+   $1 = nterm exp (3333)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
 Entering state 6
 Reading a token
-Next token is token '-' (4.1: )
-Shifting token '-' (4.1: )
-Entering state 2
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+571. calc.at:1504: testing Calculator lalr1.d  ...
+./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1489: cat stderr
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (4.2: 1)
-Shifting token "number" (4.2: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (4.2: 1)
--> $$ = nterm exp (4.2: 1)
-Entering state 10
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
-Next token is token '^' (4.3: )
-Shifting token '^' (4.3: )
-Entering state 23
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token "number" (4.4: 2)
-Shifting token "number" (4.4: 2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (4.4: 2)
--> $$ = nterm exp (4.4: 2)
-Entering state 32
-Reading a token
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (4.2: 1)
-   $2 = token '^' (4.3: )
-   $3 = nterm exp (4.4: 2)
--> $$ = nterm exp (4.2-4: 1)
-Entering state 10
-Next token is token '=' (4.6: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (4.1: )
-   $2 = nterm exp (4.2-4: 1)
--> $$ = nterm exp (4.1-4: -1)
-Entering state 8
-Next token is token '=' (4.6: )
-Shifting token '=' (4.6: )
-Entering state 18
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
 Reading a token
-Next token is token '-' (4.8: )
-Shifting token '-' (4.8: )
-Entering state 2
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
-Next token is token "number" (4.9: 1)
-Shifting token "number" (4.9: 1)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (4.9: 1)
--> $$ = nterm exp (4.9: 1)
-Entering state 10
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
 Reading a token
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (4.8: )
-   $2 = nterm exp (4.9: 1)
--> $$ = nterm exp (4.8-9: -1)
-Entering state 27
-Next token is token '\n' (4.10-5.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (4.1-4: -1)
-   $2 = token '=' (4.6: )
-   $3 = nterm exp (4.8-9: -1)
--> $$ = nterm exp (4.1-9: -1)
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
 Entering state 8
-Next token is token '\n' (4.10-5.0: )
-Shifting token '\n' (4.10-5.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (4.1-9: -1)
-   $2 = token '\n' (4.10-5.0: )
--> $$ = nterm line (4.1-5.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-4.0: )
-   $2 = nterm line (4.1-5.0: )
--> $$ = nterm input (1.1-5.0: )
-Entering state 6
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '(' (5.1: )
-Shifting token '(' (5.1: )
-Entering state 4
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token '-' (5.2: )
-Shifting token '-' (5.2: )
-Entering state 2
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+572. calc.at:1509: testing Calculator D   ...
+./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+Starting parse
+Entering state 0
 Reading a token
-Next token is token "number" (5.3: 1)
-Shifting token "number" (5.3: 1)
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (5.3: 1)
--> $$ = nterm exp (5.3: 1)
-Entering state 10
-Reading a token
-Next token is token ')' (5.4: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (5.2: )
-   $2 = nterm exp (5.3: 1)
--> $$ = nterm exp (5.2-3: -1)
-Entering state 12
-Next token is token ')' (5.4: )
-Shifting token ')' (5.4: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (5.1: )
-   $2 = nterm exp (5.2-3: -1)
-   $3 = token ')' (5.4: )
--> $$ = nterm exp (5.1-4: -1)
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '^' (5.5: )
-Shifting token '^' (5.5: )
-Entering state 23
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token "number" (5.6: 2)
-Shifting token "number" (5.6: 2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (5.6: 2)
--> $$ = nterm exp (5.6: 2)
-Entering state 32
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
 Reading a token
-Next token is token '=' (5.8: )
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (5.1-4: -1)
-   $2 = token '^' (5.5: )
-   $3 = nterm exp (5.6: 2)
--> $$ = nterm exp (5.1-6: 1)
-Entering state 8
-Next token is token '=' (5.8: )
-Shifting token '=' (5.8: )
-Entering state 18
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
-Next token is token "number" (5.10: 1)
-Shifting token "number" (5.10: 1)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (5.10: 1)
--> $$ = nterm exp (5.10: 1)
-Entering state 27
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
 Reading a token
-Next token is token '\n' (5.11-6.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (5.1-6: 1)
-   $2 = token '=' (5.8: )
-   $3 = nterm exp (5.10: 1)
--> $$ = nterm exp (5.1-10: 1)
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
 Entering state 8
-Next token is token '\n' (5.11-6.0: )
-Shifting token '\n' (5.11-6.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (5.1-10: 1)
-   $2 = token '\n' (5.11-6.0: )
--> $$ = nterm line (5.1-6.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-5.0: )
-   $2 = nterm line (5.1-6.0: )
--> $$ = nterm input (1.1-6.0: )
-Entering state 6
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '\n' (6.1-7.0: )
-Shifting token '\n' (6.1-7.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' (6.1-7.0: )
--> $$ = nterm line (6.1-7.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-6.0: )
-   $2 = nterm line (6.1-7.0: )
--> $$ = nterm input (1.1-7.0: )
-Entering state 6
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token '-' (7.1: )
-Shifting token '-' (7.1: )
-Entering state 2
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' ()
+   $2 = token '+' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+input:
+571. calc.at:1504:   | 1 + 2 * 3 + !- ++
+./calc.at:1489:  $PREPARSER ./calc  input
+ skipped (calc.at:1504)
+stderr:
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '-' (7.2: )
-Shifting token '-' (7.2: )
-Entering state 2
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 8
 Reading a token
-Next token is token '-' (7.3: )
-Shifting token '-' (7.3: )
-Entering state 2
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token "number" (7.4: 1)
-Shifting token "number" (7.4: 1)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (7.4: 1)
--> $$ = nterm exp (7.4: 1)
-Entering state 10
-Reading a token
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (7.3: )
-   $2 = nterm exp (7.4: 1)
--> $$ = nterm exp (7.3-4: -1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (7.2: )
-   $2 = nterm exp (7.3-4: -1)
--> $$ = nterm exp (7.2-4: 1)
-Entering state 10
-Next token is token '=' (7.6: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (7.1: )
-   $2 = nterm exp (7.2-4: 1)
--> $$ = nterm exp (7.1-4: -1)
-Entering state 8
-Next token is token '=' (7.6: )
-Shifting token '=' (7.6: )
-Entering state 18
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
 Reading a token
-Next token is token '-' (7.8: )
-Shifting token '-' (7.8: )
-Entering state 2
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
-Next token is token "number" (7.9: 1)
-Shifting token "number" (7.9: 1)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (7.9: 1)
--> $$ = nterm exp (7.9: 1)
-Entering state 10
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
 Reading a token
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 11 (line 102):
-   $1 = token '-' (7.8: )
-   $2 = nterm exp (7.9: 1)
--> $$ = nterm exp (7.8-9: -1)
-Entering state 27
-Next token is token '\n' (7.10-8.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (7.1-4: -1)
-   $2 = token '=' (7.6: )
-   $3 = nterm exp (7.8-9: -1)
--> $$ = nterm exp (7.1-9: -1)
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
 Entering state 8
-Next token is token '\n' (7.10-8.0: )
-Shifting token '\n' (7.10-8.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (7.1-9: -1)
-   $2 = token '\n' (7.10-8.0: )
--> $$ = nterm line (7.1-8.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-7.0: )
-   $2 = nterm line (7.1-8.0: )
--> $$ = nterm input (1.1-8.0: )
-Entering state 6
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '\n' (8.1-9.0: )
-Shifting token '\n' (8.1-9.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' (8.1-9.0: )
--> $$ = nterm line (8.1-9.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-8.0: )
-   $2 = nterm line (8.1-9.0: )
--> $$ = nterm input (1.1-9.0: )
-Entering state 6
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
 Reading a token
-Next token is token "number" (9.1: 1)
-Shifting token "number" (9.1: 1)
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (9.1: 1)
--> $$ = nterm exp (9.1: 1)
+   $1 = token number (1)
+-> $$ = nterm exp (1)
 Entering state 8
 Reading a token
-Next token is token '-' (9.3: )
-Shifting token '-' (9.3: )
-Entering state 19
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token "number" (9.5: 2)
-Shifting token "number" (9.5: 2)
+Next token is token number (2)
+Shifting token number (2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (9.5: 2)
--> $$ = nterm exp (9.5: 2)
-Entering state 28
+   $1 = token number (2)
+-> $$ = nterm exp (2)
+Entering state 29
 Reading a token
-Next token is token '-' (9.7: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (9.1: 1)
-   $2 = token '-' (9.3: )
-   $3 = nterm exp (9.5: 2)
--> $$ = nterm exp (9.1-5: -1)
-Entering state 8
-Next token is token '-' (9.7: )
-Shifting token '-' (9.7: )
-Entering state 19
+Next token is token '*' ()
+Shifting token '*' ()
+Entering state 21
 Reading a token
-Next token is token "number" (9.9: 3)
-Shifting token "number" (9.9: 3)
+Next token is token number (3)
+Shifting token number (3)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (9.9: 3)
--> $$ = nterm exp (9.9: 3)
-Entering state 28
+   $1 = token number (3)
+-> $$ = nterm exp (3)
+Entering state 30
 Reading a token
-Next token is token '=' (9.11: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (9.1-5: -1)
-   $2 = token '-' (9.7: )
-   $3 = nterm exp (9.9: 3)
--> $$ = nterm exp (9.1-9: -4)
+Next token is token '+' ()
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2)
+   $2 = token '*' ()
+   $3 = nterm exp (3)
+-> $$ = nterm exp (6)
+Entering state 29
+Next token is token '+' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (6)
+-> $$ = nterm exp (7)
 Entering state 8
-Next token is token '=' (9.11: )
-Shifting token '=' (9.11: )
-Entering state 18
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
 Reading a token
-Next token is token '-' (9.13: )
-Shifting token '-' (9.13: )
+Next token is token '!' ()
+Shifting token '!' ()
+Entering state 5
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' ()
+   $2 = token '-' ()
+Cleanup: popping token '+' ()
+Cleanup: popping nterm exp (7)
+572. calc.at:1509: ./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+ skipped (calc.at:1509)
+./calc.at:1489: cat stderr
+
+input:
+  | (#) + (#) = 2222
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+573. calc.at:1510: testing Calculator D %locations  ...
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2222)
+Shifting token number (2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2222)
+-> $$ = nterm exp (2222)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 29
+Reading a token
+Next token is token '=' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1111)
+   $2 = token '+' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (2222)
+Shifting token number (2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (2222)
+-> $$ = nterm exp (2222)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2222)
+   $2 = token '=' ()
+   $3 = nterm exp (2222)
+-> $$ = nterm exp (2222)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2222)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1489: cat stderr
+input:
+  | (1 + #) = 1111
+./calc.at:1489:  $PREPARSER ./calc  input
+574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose  ...
+./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+573. calc.at:1510: stderr:
+ skipped (calc.at:1510)
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1489: cat stderr
+574. calc.at:1512:  skipped (calc.at:1512)
+input:
+  | (# + 1) = 1111
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+575. calc.at:1514: testing Calculator D %debug  ...
+./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+576. calc.at:1516: testing Calculator D parse.error=custom  ...
+./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+./calc.at:1489: cat stderr
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+575. calc.at:1514:  skipped (calc.at:1514)
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+syntax error: invalid character: '#'
+Next token is token error ()
+Error: popping token '+' ()
+Error: popping nterm exp (1)
+Shifting token error ()
+Entering state 11
+Next token is token error ()
+Error: discarding token error ()
+Reading a token
+Next token is token '+' ()
+Error: discarding token '+' ()
+Reading a token
+Next token is token number (1)
+Error: discarding token number (1)
+Reading a token
+Next token is token ')' ()
+Entering state 11
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' ()
+   $2 = token error ()
+   $3 = token ')' ()
+-> $$ = nterm exp (1111)
+Entering state 8
+Reading a token
+Next token is token '=' ()
+Shifting token '=' ()
+Entering state 18
+Reading a token
+Next token is token number (1111)
+Shifting token number (1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1111)
+-> $$ = nterm exp (1111)
+Entering state 27
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1111)
+   $2 = token '=' ()
+   $3 = nterm exp (1111)
+-> $$ = nterm exp (1111)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1111)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+
+576. calc.at:1516: ./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+ skipped (calc.at:1516)
+./calc.at:1489: cat stderr
+
+input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1489:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '+' ()
+Shifting token '+' ()
+Entering state 20
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 29
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1)
+   $2 = token '+' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (2)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (2)
+   $3 = token ')' ()
+-> $$ = nterm exp (2)
+Entering state 8
+Reading a token
+Next token is token '/' ()
+Shifting token '/' ()
+Entering state 22
+Reading a token
+Next token is token '(' ()
+Shifting token '(' ()
+Entering state 4
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 12
+Reading a token
+Next token is token '-' ()
+Shifting token '-' ()
+Entering state 19
+Reading a token
+Next token is token number (1)
+Shifting token number (1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1)
+-> $$ = nterm exp (1)
+Entering state 28
+Reading a token
+Next token is token ')' ()
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1)
+   $2 = token '-' ()
+   $3 = nterm exp (1)
+-> $$ = nterm exp (0)
+Entering state 12
+Next token is token ')' ()
+Shifting token ')' ()
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' ()
+   $2 = nterm exp (0)
+   $3 = token ')' ()
+-> $$ = nterm exp (0)
+Entering state 31
+Reading a token
+Next token is token '\n' ()
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (2)
+   $2 = token '/' ()
+   $3 = nterm exp (0)
+error: null divisor
+-> $$ = nterm exp (2)
+Entering state 8
+Next token is token '\n' ()
+Shifting token '\n' ()
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2)
+   $2 = token '\n' ()
+-> $$ = nterm line ()
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line ()
+-> $$ = nterm input ()
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input ()
+Entering state 16
+Cleanup: popping token end of input ()
+Cleanup: popping nterm input ()
+577. calc.at:1517: testing Calculator D %locations parse.error=custom  ...
+./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+./calc.at:1489: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+578. calc.at:1518: testing Calculator D %locations parse.error=detailed  ...
+./calc.at:1489: cat stderr
+./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+564. calc.at:1489:  ok
+577. calc.at:1517:  skipped (calc.at:1517)
+
+
+578. calc.at:1518:  skipped (calc.at:1518)
+
+580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose  ...
+./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+579. calc.at:1519: testing Calculator D %locations parse.error=simple  ...
+./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose  ...
+./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+580. calc.at:1520:  skipped (calc.at:1520)
+579. calc.at:1519:  skipped (calc.at:1519)
+
+
+581. calc.at:1521:  skipped (calc.at:1521)
+
+583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom  ...
+./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed  ...
+./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace  ...
+./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+583. calc.at:1524: 582. calc.at:1523:  skipped (calc.at:1524)
+ skipped (calc.at:1523)
+
+584. calc.at:1525: 
+ skipped (calc.at:1525)
+
+585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union  ...
+./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+586. calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed  ...
+./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+585. calc.at:1530: 587. calc.at:1532: testing Calculator D api.push-pull=both  ...
+ skipped (calc.at:1530)
+./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+
+586. calc.at:1531:  skipped (calc.at:1531)
+
+587. calc.at:1532:  skipped (calc.at:1532)
+588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full  ...
+
+./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y
+589. calc.at:1544: testing Calculator Java   ...
+./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
+590. calc.at:1545: testing Calculator Java parse.error=custom  ...
+stderr:
+./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
+stdout:
+./calc.at:1492: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
+588. calc.at:1533:  skipped (calc.at:1533)
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1492:  $PREPARSER ./calc  input
+
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.13: 7)
+Shifting token "number" (1.13: 7)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.13: 7)
+-> $$ = nterm exp (1.13: 7)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.14-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 7)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13: 7)
+-> $$ = nterm exp (1.1-13: 7)
+Entering state 8
+Next token is token '\n' (1.14-2.0: )
+Shifting token '\n' (1.14-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-13: 7)
+   $2 = token '\n' (1.14-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (2.1: 1)
+Shifting token "number" (2.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2.1: 1)
+-> $$ = nterm exp (2.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (2.3: )
+Shifting token '+' (2.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (2.5: 2)
+Shifting token "number" (2.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2.5: 2)
+-> $$ = nterm exp (2.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (2.7: )
+Shifting token '*' (2.7: )
+Entering state 21
+Reading a token
+Next token is token '-' (2.9: )
+Shifting token '-' (2.9: )
+Entering state 2
+Reading a token
+Next token is token "number" (2.10: 3)
+Shifting token "number" (2.10: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2.10: 3)
+-> $$ = nterm exp (2.10: 3)
+Entering state 10
+Reading a token
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (2.9: )
+   $2 = nterm exp (2.10: 3)
+-> $$ = nterm exp (2.9-10: -3)
+Entering state 30
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (2.5: 2)
+   $2 = token '*' (2.7: )
+   $3 = nterm exp (2.9-10: -3)
+-> $$ = nterm exp (2.5-10: -6)
+Entering state 29
+Next token is token '=' (2.12: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (2.1: 1)
+   $2 = token '+' (2.3: )
+   $3 = nterm exp (2.5-10: -6)
+-> $$ = nterm exp (2.1-10: -5)
+Entering state 8
+Next token is token '=' (2.12: )
+Shifting token '=' (2.12: )
+Entering state 18
+Reading a token
+Next token is token '-' (2.14: )
+Shifting token '-' (2.14: )
+Entering state 2
+Reading a token
+Next token is token "number" (2.15: 5)
+Shifting token "number" (2.15: 5)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (2.15: 5)
+-> $$ = nterm exp (2.15: 5)
+Entering state 10
+Reading a token
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (2.14: )
+   $2 = nterm exp (2.15: 5)
+-> $$ = nterm exp (2.14-15: -5)
+Entering state 27
+Next token is token '\n' (2.16-3.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (2.1-10: -5)
+   $2 = token '=' (2.12: )
+   $3 = nterm exp (2.14-15: -5)
+-> $$ = nterm exp (2.1-15: -5)
+Entering state 8
+Next token is token '\n' (2.16-3.0: )
+Shifting token '\n' (2.16-3.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (2.1-15: -5)
+   $2 = token '\n' (2.16-3.0: )
+-> $$ = nterm line (2.1-3.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-2.0: )
+   $2 = nterm line (2.1-3.0: )
+-> $$ = nterm input (1.1-3.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (3.1-4.0: )
+Shifting token '\n' (3.1-4.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' (3.1-4.0: )
+-> $$ = nterm line (3.1-4.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-3.0: )
+   $2 = nterm line (3.1-4.0: )
+-> $$ = nterm input (1.1-4.0: )
+Entering state 6
+Reading a token
+Next token is token '-' (4.1: )
+Shifting token '-' (4.1: )
+Entering state 2
+Reading a token
+Next token is token "number" (4.2: 1)
+Shifting token "number" (4.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (4.2: 1)
+-> $$ = nterm exp (4.2: 1)
+Entering state 10
+Reading a token
+Next token is token '^' (4.3: )
+Shifting token '^' (4.3: )
+Entering state 23
+Reading a token
+Next token is token "number" (4.4: 2)
+Shifting token "number" (4.4: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (4.4: 2)
+-> $$ = nterm exp (4.4: 2)
+Entering state 32
+Reading a token
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (4.2: 1)
+   $2 = token '^' (4.3: )
+   $3 = nterm exp (4.4: 2)
+-> $$ = nterm exp (4.2-4: 1)
+Entering state 10
+Next token is token '=' (4.6: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (4.1: )
+   $2 = nterm exp (4.2-4: 1)
+-> $$ = nterm exp (4.1-4: -1)
+Entering state 8
+Next token is token '=' (4.6: )
+Shifting token '=' (4.6: )
+Entering state 18
+Reading a token
+Next token is token '-' (4.8: )
+Shifting token '-' (4.8: )
+Entering state 2
+Reading a token
+Next token is token "number" (4.9: 1)
+Shifting token "number" (4.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (4.9: 1)
+-> $$ = nterm exp (4.9: 1)
+Entering state 10
+Reading a token
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (4.8: )
+   $2 = nterm exp (4.9: 1)
+-> $$ = nterm exp (4.8-9: -1)
+Entering state 27
+Next token is token '\n' (4.10-5.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (4.1-4: -1)
+   $2 = token '=' (4.6: )
+   $3 = nterm exp (4.8-9: -1)
+-> $$ = nterm exp (4.1-9: -1)
+Entering state 8
+Next token is token '\n' (4.10-5.0: )
+Shifting token '\n' (4.10-5.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (4.1-9: -1)
+   $2 = token '\n' (4.10-5.0: )
+-> $$ = nterm line (4.1-5.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-4.0: )
+   $2 = nterm line (4.1-5.0: )
+-> $$ = nterm input (1.1-5.0: )
+Entering state 6
+Reading a token
+Next token is token '(' (5.1: )
+Shifting token '(' (5.1: )
+Entering state 4
+Reading a token
+Next token is token '-' (5.2: )
+Shifting token '-' (5.2: )
+Entering state 2
+Reading a token
+Next token is token "number" (5.3: 1)
+Shifting token "number" (5.3: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (5.3: 1)
+-> $$ = nterm exp (5.3: 1)
+Entering state 10
+Reading a token
+Next token is token ')' (5.4: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (5.2: )
+   $2 = nterm exp (5.3: 1)
+-> $$ = nterm exp (5.2-3: -1)
+Entering state 12
+Next token is token ')' (5.4: )
+Shifting token ')' (5.4: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (5.1: )
+   $2 = nterm exp (5.2-3: -1)
+   $3 = token ')' (5.4: )
+-> $$ = nterm exp (5.1-4: -1)
+Entering state 8
+Reading a token
+Next token is token '^' (5.5: )
+Shifting token '^' (5.5: )
+Entering state 23
+Reading a token
+Next token is token "number" (5.6: 2)
+Shifting token "number" (5.6: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (5.6: 2)
+-> $$ = nterm exp (5.6: 2)
+Entering state 32
+Reading a token
+Next token is token '=' (5.8: )
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (5.1-4: -1)
+   $2 = token '^' (5.5: )
+   $3 = nterm exp (5.6: 2)
+-> $$ = nterm exp (5.1-6: 1)
+Entering state 8
+Next token is token '=' (5.8: )
+Shifting token '=' (5.8: )
+Entering state 18
+Reading a token
+Next token is token "number" (5.10: 1)
+Shifting token "number" (5.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (5.10: 1)
+-> $$ = nterm exp (5.10: 1)
+Entering state 27
+Reading a token
+Next token is token '\n' (5.11-6.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (5.1-6: 1)
+   $2 = token '=' (5.8: )
+   $3 = nterm exp (5.10: 1)
+-> $$ = nterm exp (5.1-10: 1)
+Entering state 8
+Next token is token '\n' (5.11-6.0: )
+Shifting token '\n' (5.11-6.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (5.1-10: 1)
+   $2 = token '\n' (5.11-6.0: )
+-> $$ = nterm line (5.1-6.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-5.0: )
+   $2 = nterm line (5.1-6.0: )
+-> $$ = nterm input (1.1-6.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (6.1-7.0: )
+Shifting token '\n' (6.1-7.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' (6.1-7.0: )
+-> $$ = nterm line (6.1-7.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-6.0: )
+   $2 = nterm line (6.1-7.0: )
+-> $$ = nterm input (1.1-7.0: )
+Entering state 6
+Reading a token
+Next token is token '-' (7.1: )
+Shifting token '-' (7.1: )
+Entering state 2
+Reading a token
+Next token is token '-' (7.2: )
+Shifting token '-' (7.2: )
+Entering state 2
+Reading a token
+Next token is token '-' (7.3: )
+Shifting token '-' (7.3: )
+Entering state 2
+Reading a token
+Next token is token "number" (7.4: 1)
+Shifting token "number" (7.4: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (7.4: 1)
+-> $$ = nterm exp (7.4: 1)
+Entering state 10
+Reading a token
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (7.3: )
+   $2 = nterm exp (7.4: 1)
+-> $$ = nterm exp (7.3-4: -1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (7.2: )
+   $2 = nterm exp (7.3-4: -1)
+-> $$ = nterm exp (7.2-4: 1)
+Entering state 10
+Next token is token '=' (7.6: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (7.1: )
+   $2 = nterm exp (7.2-4: 1)
+-> $$ = nterm exp (7.1-4: -1)
+Entering state 8
+Next token is token '=' (7.6: )
+Shifting token '=' (7.6: )
+Entering state 18
+Reading a token
+Next token is token '-' (7.8: )
+Shifting token '-' (7.8: )
+Entering state 2
+Reading a token
+Next token is token "number" (7.9: 1)
+Shifting token "number" (7.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (7.9: 1)
+-> $$ = nterm exp (7.9: 1)
+Entering state 10
+Reading a token
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 11 (line 102):
+   $1 = token '-' (7.8: )
+   $2 = nterm exp (7.9: 1)
+-> $$ = nterm exp (7.8-9: -1)
+Entering state 27
+Next token is token '\n' (7.10-8.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (7.1-4: -1)
+   $2 = token '=' (7.6: )
+   $3 = nterm exp (7.8-9: -1)
+-> $$ = nterm exp (7.1-9: -1)
+Entering state 8
+Next token is token '\n' (7.10-8.0: )
+Shifting token '\n' (7.10-8.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (7.1-9: -1)
+   $2 = token '\n' (7.10-8.0: )
+-> $$ = nterm line (7.1-8.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-7.0: )
+   $2 = nterm line (7.1-8.0: )
+-> $$ = nterm input (1.1-8.0: )
+Entering state 6
+Reading a token
+Next token is token '\n' (8.1-9.0: )
+Shifting token '\n' (8.1-9.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' (8.1-9.0: )
+-> $$ = nterm line (8.1-9.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-8.0: )
+   $2 = nterm line (8.1-9.0: )
+-> $$ = nterm input (1.1-9.0: )
+Entering state 6
+Reading a token
+Next token is token "number" (9.1: 1)
+Shifting token "number" (9.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (9.1: 1)
+-> $$ = nterm exp (9.1: 1)
+Entering state 8
+Reading a token
+Next token is token '-' (9.3: )
+Shifting token '-' (9.3: )
+Entering state 19
+Reading a token
+Next token is token "number" (9.5: 2)
+Shifting token "number" (9.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (9.5: 2)
+-> $$ = nterm exp (9.5: 2)
+Entering state 28
+Reading a token
+Next token is token '-' (9.7: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (9.1: 1)
+   $2 = token '-' (9.3: )
+   $3 = nterm exp (9.5: 2)
+-> $$ = nterm exp (9.1-5: -1)
+Entering state 8
+Next token is token '-' (9.7: )
+Shifting token '-' (9.7: )
+Entering state 19
+Reading a token
+Next token is token "number" (9.9: 3)
+Shifting token "number" (9.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (9.9: 3)
+-> $$ = nterm exp (9.9: 3)
+Entering state 28
+Reading a token
+Next token is token '=' (9.11: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (9.1-5: -1)
+   $2 = token '-' (9.7: )
+   $3 = nterm exp (9.9: 3)
+-> $$ = nterm exp (9.1-9: -4)
+Entering state 8
+Next token is token '=' (9.11: )
+Shifting token '=' (9.11: )
+Entering state 18
+Reading a token
+Next token is token '-' (9.13: )
+Shifting token '-' (9.13: )
 Entering state 2
 Reading a token
 Next token is token "number" (9.14: 4)
@@ -219527,8 +219518,8 @@
 Entering state 16
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-601. calc.at:1557: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
- skipped (calc.at:1557)
+589. calc.at:1544: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+ skipped (calc.at:1544)
 stderr:
 Starting parse
 Entering state 0
@@ -219829,7 +219820,7 @@
 Shifting token ')' (5.4: )
 Entering state 26
 Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (5.1: )
+   $1 = token '(' (590. calc.at:1545: 5.1: )
    $2 = nterm exp (5.2-3: -1)
    $3 = token ')' (5.4: )
 -> $$ = nterm exp (5.1-4: -1)
@@ -220366,8 +220357,9 @@
 Entering state 16
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-input:
+ skipped (calc.at:1545)
 
+input:
   | 1 2
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
@@ -220386,10 +220378,11 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
+
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+591. calc.at:1546: testing Calculator Java parse.error=detailed  ...
+./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 stderr:
-602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full  ...
-./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 Starting parse
 Entering state 0
 Reading a token
@@ -220405,7 +220398,6 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token "number" (1.3: 2)
-603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full  ...
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -220416,12 +220408,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 ./calc.at:1492: cat stderr
+592. calc.at:1547: testing Calculator Java parse.error=verbose  ...
+./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 input:
   | 1//2
 ./calc.at:1492:  $PREPARSER ./calc  input
+593. calc.at:1548: testing Calculator Java %locations parse.error=custom  ...
 stderr:
+./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 Starting parse
 Entering state 0
 Reading a token
@@ -220443,9 +220438,8 @@
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-604. torture.at:132: testing Big triangle ...
 stderr:
-Starting parse
+591. calc.at:1546: Starting parse
 Entering state 0
 Reading a token
 Next token is token "number" (1.1: 1)
@@ -220465,7 +220459,7 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
-./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77
+ skipped (calc.at:1546)
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -220476,15 +220470,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-602. calc.at:1560:  skipped (calc.at:1560)
 ./calc.at:1492: cat stderr
-603. calc.at:1561:  skipped (calc.at:1561)
+592. calc.at:1547:  skipped (calc.at:1547)
 
 input:
-  | error
-./calc.at:1492:  $PREPARSER ./calc  input
 
+593. calc.at:1548:   | error
+./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
+ skipped (calc.at:1548)
 Starting parse
 Entering state 0
 Reading a token
@@ -220493,12 +220487,15 @@
 Cleanup: discarding lookahead token "invalid token" (1.1: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+
 Starting parse
 Entering state 0
 Reading a token
 Next token is token "invalid token" (1.1: )
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token "invalid token" (1.1: )
+594. calc.at:1549: testing Calculator Java %locations parse.error=detailed  ...
+./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -220510,11 +220507,11 @@
   }eg
 ' expout || exit 77
 ./calc.at:1492: cat stderr
+595. calc.at:1550: testing Calculator Java %locations parse.error=verbose  ...
 input:
-605. torture.at:216: testing Big horizontal ...
+./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
   | 1 = 2 = 3
 ./calc.at:1492:  $PREPARSER ./calc  input
-./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -220576,6 +220573,8 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
+596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose  ...
+594. calc.at:1549: ./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -220586,19 +220585,16 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1492: cat stderr
+ skipped (calc.at:1549)
+595. calc.at:1550: ./calc.at:1492: cat stderr
+ skipped (calc.at:1550)
 input:
-606. torture.at:270: testing State number type: 128 states ...
-./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77
+
+
   | 
   | +1
 ./calc.at:1492:  $PREPARSER ./calc  input
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr	2023-05-18 03:35:15.096147455 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found
 stderr:
-606. torture.at:270:  skipped (torture.at:270)
 Starting parse
 Entering state 0
 Reading a token
@@ -220620,8 +220616,7 @@
 Cleanup: discarding lookahead token '+' (2.1: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-
-Starting parse
+596. calc.at:1551: Starting parse
 Entering state 0
 Reading a token
 Next token is token '\n' (1.1-2.0: )
@@ -220640,1144 +220635,9 @@
 2.1: syntax error, unexpected '+'
 Error: popping nterm input (1.1-2.0: )
 Cleanup: discarding lookahead token '+' (2.1: )
-stdout:
-%code top { /* -*- c -*- */
-/* Adjust to the compiler.
-  We used to do it here, but each time we add a new line,
-  we have to adjust all the line numbers in error messages.
-  It's simpler to use a constant include to a varying file.  */
-#include <testsuite.h>
-}
-
-%define parse.error verbose
-%debug
-%{
-#include <stdio.h>
-#include <stdlib.h>
-#define MAX 1000
-static int yylex (void);
-#include <stdio.h>
-
-/* !POSIX */ static void yyerror (const char *msg);
-%}
-
-%token
-    t1 1 "1"
-    t2 2 "2"
-    t3 3 "3"
-    t4 4 "4"
-    t5 5 "5"
-    t6 6 "6"
-    t7 7 "7"
-    t8 8 "8"
-    t9 9 "9"
-    t10 10 "10"
-    t11 11 "11"
-    t12 12 "12"
-    t13 13 "13"
-    t14 14 "14"
-    t15 15 "15"
-    t16 16 "16"
-    t17 17 "17"
-    t18 18 "18"
-    t19 19 "19"
-    t20 20 "20"
-    t21 21 "21"
-    t22 22 "22"
-    t23 23 "23"
-    t24 24 "24"
-    t25 25 "25"
-    t26 26 "26"
-    t27 27 "27"
-    t28 28 "28"
-    t29 29 "29"
-    t30 30 "30"
-    t31 31 "31"
-    t32 32 "32"
-    t33 33 "33"
-    t34 34 "34"
-    t35 35 "35"
-    t36 36 "36"
-    t37 37 "37"
-    t38 38 "38"
-    t39 39 "39"
-    t40 40 "40"
-    t41 41 "41"
-    t42 42 "42"
-    t43 43 "43"
-    t44 44 "44"
-    t45 45 "45"
-    t46 46 "46"
-    t47 47 "47"
-    t48 48 "48"
-    t49 49 "49"
-    t50 50 "50"
-    t51 51 "51"
-    t52 52 "52"
-    t53 53 "53"
-    t54 54 "54"
-    t55 55 "55"
-    t56 56 "56"
-    t57 57 "57"
-    t58 58 "58"
-    t59 59 "59"
-    t60 60 "60"
-    t61 61 "61"
-    t62 62 "62"
-    t63 63 "63"
-    t64 64 "64"
-    t65 65 "65"
-    t66 66 "66"
-    t67 67 "67"
-    t68 68 "68"
-    t69 69 "69"
-    t70 70 "70"
-    t71 71 "71"
-    t72 72 "72"
-    t73 73 "73"
-    t74 74 "74"
-    t75 75 "75"
-    t76 76 "76"
-    t77 77 "77"
-    t78 78 "78"
-    t79 79 "79"
-    t80 80 "80"
-    t81 81 "81"
-    t82 82 "82"
-    t83 83 "83"
-    t84 84 "84"
-    t85 85 "85"
-    t86 86 "86"
-    t87 87 "87"
-    t88 88 "88"
-    t89 89 "89"
-    t90 90 "90"
-    t91 91 "91"
-    t92 92 "92"
-    t93 93 "93"
-    t94 94 "94"
-    t95 95 "95"
-    t96 96 "96"
-    t97 97 "97"
-    t98 98 "98"
-    t99 99 "99"
-    t100 100 "100"
-    t101 101 "101"
-    t102 102 "102"
-    t103 103 "103"
-    t104 104 "104"
-    t105 105 "105"
-    t106 106 "106"
-    t107 107 "107"
-    t108 108 "108"
-    t109 109 "109"
-    t110 110 "110"
-    t111 111 "111"
-    t112 112 "112"
-    t113 113 "113"
-    t114 114 "114"
-    t115 115 "115"
-    t116 116 "116"
-    t117 117 "117"
-    t118 118 "118"
-    t119 119 "119"
-    t120 120 "120"
-    t121 121 "121"
-    t122 122 "122"
-    t123 123 "123"
-    t124 124 "124"
-    t125 125 "125"
-    t126 126 "126"
-    t127 127 "127"
-    t128 128 "128"
-    t129 129 "129"
-    t130 130 "130"
-    t131 131 "131"
-    t132 132 "132"
-    t133 133 "133"
-    t134 134 "134"
-    t135 135 "135"
-    t136 136 "136"
-    t137 137 "137"
-    t138 138 "138"
-    t139 139 "139"
-    t140 140 "140"
-    t141 141 "141"
-    t142 142 "142"
-    t143 143 "143"
-    t144 144 "144"
-    t145 145 "145"
-    t146 146 "146"
-    t147 147 "147"
-    t148 148 "148"
-    t149 149 "149"
-    t150 150 "150"
-    t151 151 "151"
-    t152 152 "152"
-    t153 153 "153"
-    t154 154 "154"
-    t155 155 "155"
-    t156 156 "156"
-    t157 157 "157"
-    t158 158 "158"
-    t159 159 "159"
-    t160 160 "160"
-    t161 161 "161"
-    t162 162 "162"
-    t163 163 "163"
-    t164 164 "164"
-    t165 165 "165"
-    t166 166 "166"
-    t167 167 "167"
-    t168 168 "168"
-    t169 169 "169"
-    t170 170 "170"
-    t171 171 "171"
-    t172 172 "172"
-    t173 173 "173"
-    t174 174 "174"
-    t175 175 "175"
-    t176 176 "176"
-    t177 177 "177"
-    t178 178 "178"
-    t179 179 "179"
-    t180 180 "180"
-    t181 181 "181"
-    t182 182 "182"
-    t183 183 "183"
-    t184 184 "184"
-    t185 185 "185"
-    t186 186 "186"
-    t187 187 "187"
-    t188 188 "188"
-    t189 189 "189"
-    t190 190 "190"
-    t191 191 "191"
-    t192 192 "192"
-    t193 193 "193"
-    t194 194 "194"
-    t195 195 "195"
-    t196 196 "196"
-    t197 197 "197"
-    t198 198 "198"
-    t199 199 "199"
-    t200 200 "200"
-    t201 201 "201"
-    t202 202 "202"
-    t203 203 "203"
-    t204 204 "204"
-    t205 205 "205"
-    t206 206 "206"
-    t207 207 "207"
-    t208 208 "208"
-    t209 209 "209"
-    t210 210 "210"
-    t211 211 "211"
-    t212 212 "212"
-    t213 213 "213"
-    t214 214 "214"
-    t215 215 "215"
-    t216 216 "216"
-    t217 217 "217"
-    t218 218 "218"
-    t219 219 "219"
-    t220 220 "220"
-    t221 221 "221"
-    t222 222 "222"
-    t223 223 "223"
-    t224 224 "224"
-    t225 225 "225"
-    t226 226 "226"
-    t227 227 "227"
-    t228 228 "228"
-    t229 229 "229"
-    t230 230 "230"
-    t231 231 "231"
-    t232 232 "232"
-    t233 233 "233"
-    t234 234 "234"
-    t235 235 "235"
-    t236 236 "236"
-    t237 237 "237"
-    t238 238 "238"
-    t239 239 "239"
-    t240 240 "240"
-    t241 241 "241"
-    t242 242 "242"
-    t243 243 "243"
-    t244 244 "244"
-    t245 245 "245"
-    t246 246 "246"
-    t247 247 "247"
-    t248 248 "248"
-    t249 249 "249"
-    t250 250 "250"
-    t251 251 "251"
-    t252 252 "252"
-    t253 253 "253"
-    t254 254 "254"
-    t255 255 "255"
-    t256 256 "256"
-    t257 257 "257"
-    t258 258 "258"
-    t259 259 "259"
-    t260 260 "260"
-    t261 261 "261"
-    t262 262 "262"
-    t263 263 "263"
-    t264 264 "264"
-    t265 265 "265"
-    t266 266 "266"
-    t267 267 "267"
-    t268 268 "268"
-    t269 269 "269"
-    t270 270 "270"
-    t271 271 "271"
-    t272 272 "272"
-    t273 273 "273"
-    t274 274 "274"
-    t275 275 "275"
-    t276 276 "276"
-    t277 277 "277"
-    t278 278 "278"
-    t279 279 "279"
-    t280 280 "280"
-    t281 281 "281"
-    t282 282 "282"
-    t283 283 "283"
-    t284 284 "284"
-    t285 285 "285"
-    t286 286 "286"
-    t287 287 "287"
-    t288 288 "288"
-    t289 289 "289"
-    t290 290 "290"
-    t291 291 "291"
-    t292 292 "292"
-    t293 293 "293"
-    t294 294 "294"
-    t295 295 "295"
-    t296 296 "296"
-    t297 297 "297"
-    t298 298 "298"
-    t299 299 "299"
-    t300 300 "300"
-    t301 301 "301"
-    t302 302 "302"
-    t303 303 "303"
-    t304 304 "304"
-    t305 305 "305"
-    t306 306 "306"
-    t307 307 "307"
-    t308 308 "308"
-    t309 309 "309"
-    t310 310 "310"
-    t311 311 "311"
-    t312 312 "312"
-    t313 313 "313"
-    t314 314 "314"
-    t315 315 "315"
-    t316 316 "316"
-    t317 317 "317"
-    t318 318 "318"
-    t319 319 "319"
-    t320 320 "320"
-    t321 321 "321"
-    t322 322 "322"
-    t323 323 "323"
-    t324 324 "324"
-    t325 325 "325"
-    t326 326 "326"
-    t327 327 "327"
-    t328 328 "328"
-    t329 329 "329"
-    t330 330 "330"
-    t331 331 "331"
-    t332 332 "332"
-    t333 333 "333"
-    t334 334 "334"
-    t335 335 "335"
-    t336 336 "336"
-    t337 337 "337"
-    t338 338 "338"
-    t339 339 "339"
-    t340 340 "340"
-    t341 341 "341"
-    t342 342 "342"
-    t343 343 "343"
-    t344 344 "344"
-    t345 345 "345"
-    t346 346 "346"
-    t347 347 "347"
-    t348 348 "348"
-    t349 349 "349"
-    t350 350 "350"
-    t351 351 "351"
-    t352 352 "352"
-    t353 353 "353"
-    t354 354 "354"
-    t355 355 "355"
-    t356 356 "356"
-    t357 357 "357"
-    t358 358 "358"
-    t359 359 "359"
-    t360 360 "360"
-    t361 361 "361"
-    t362 362 "362"
-    t363 363 "363"
-    t364 364 "364"
-    t365 365 "365"
-    t366 366 "366"
-    t367 367 "367"
-    t368 368 "368"
-    t369 369 "369"
-    t370 370 "370"
-    t371 371 "371"
-    t372 372 "372"
-    t373 373 "373"
-    t374 374 "374"
-    t375 375 "375"
-    t376 376 "376"
-    t377 377 "377"
-    t378 378 "378"
-    t379 379 "379"
-    t380 380 "380"
-    t381 381 "381"
-    t382 382 "382"
-    t383 383 "383"
-    t384 384 "384"
-    t385 385 "385"
-    t386 386 "386"
-    t387 387 "387"
-    t388 388 "388"
-    t389 389 "389"
-    t390 390 "390"
-    t391 391 "391"
-    t392 392 "392"
-    t393 393 "393"
-    t394 394 "394"
-    t395 395 "395"
-    t396 396 "396"
-    t397 397 "397"
-    t398 398 "398"
-    t399 399 "399"
-    t400 400 "400"
-    t401 401 "401"
-    t402 402 "402"
-    t403 403 "403"
-    t404 404 "404"
-    t405 405 "405"
-    t406 406 "406"
-    t407 407 "407"
-    t408 408 "408"
-    t409 409 "409"
-    t410 410 "410"
-    t411 411 "411"
-    t412 412 "412"
-    t413 413 "413"
-    t414 414 "414"
-    t415 415 "415"
-    t416 416 "416"
-    t417 417 "417"
-    t418 418 "418"
-    t419 419 "419"
-    t420 420 "420"
-    t421 421 "421"
-    t422 422 "422"
-    t423 423 "423"
-    t424 424 "424"
-    t425 425 "425"
-    t426 426 "426"
-    t427 427 "427"
-    t428 428 "428"
-    t429 429 "429"
-    t430 430 "430"
-    t431 431 "431"
-    t432 432 "432"
-    t433 433 "433"
-    t434 434 "434"
-    t435 435 "435"
-    t436 436 "436"
-    t437 437 "437"
-    t438 438 "438"
-    t439 439 "439"
-    t440 440 "440"
-    t441 441 "441"
-    t442 442 "442"
-    t443 443 "443"
-    t444 444 "444"
-    t445 445 "445"
-    t446 446 "446"
-    t447 447 "447"
-    t448 448 "448"
-    t449 449 "449"
-    t450 450 "450"
-    t451 451 "451"
-    t452 452 "452"
-    t453 453 "453"
-    t454 454 "454"
-    t455 455 "455"
-    t456 456 "456"
-    t457 457 "457"
-    t458 458 "458"
-    t459 459 "459"
-    t460 460 "460"
-    t461 461 "461"
-    t462 462 "462"
-    t463 463 "463"
-    t464 464 "464"
-    t465 465 "465"
-    t466 466 "466"
-    t467 467 "467"
-    t468 468 "468"
-    t469 469 "469"
-    t470 470 "470"
-    t471 471 "471"
-    t472 472 "472"
-    t473 473 "473"
-    t474 474 "474"
-    t475 475 "475"
-    t476 476 "476"
-    t477 477 "477"
-    t478 478 "478"
-    t479 479 "479"
-    t480 480 "480"
-    t481 481 "481"
-    t482 482 "482"
-    t483 483 "483"
-    t484 484 "484"
-    t485 485 "485"
-    t486 486 "486"
-    t487 487 "487"
-    t488 488 "488"
-    t489 489 "489"
-    t490 490 "490"
-    t491 491 "491"
-    t492 492 "492"
-    t493 493 "493"
-    t494 494 "494"
-    t495 495 "495"
-    t496 496 "496"
-    t497 497 "497"
-    t498 498 "498"
-    t499 499 "499"
-    t500 500 "500"
-    t501 501 "501"
-    t502 502 "502"
-    t503 503 "503"
-    t504 504 "504"
-    t505 505 "505"
-    t506 506 "506"
-    t507 507 "507"
-    t508 508 "508"
-    t509 509 "509"
-    t510 510 "510"
-    t511 511 "511"
-    t512 512 "512"
-    t513 513 "513"
-    t514 514 "514"
-    t515 515 "515"
-    t516 516 "516"
-    t517 517 "517"
-    t518 518 "518"
-    t519 519 "519"
-    t520 520 "520"
-    t521 521 "521"
-    t522 522 "522"
-    t523 523 "523"
-    t524 524 "524"
-    t525 525 "525"
-    t526 526 "526"
-    t527 527 "527"
-    t528 528 "528"
-    t529 529 "529"
-    t530 530 "530"
-    t531 531 "531"
-    t532 532 "532"
-    t533 533 "533"
-    t534 534 "534"
-    t535 535 "535"
-    t536 536 "536"
-    t537 537 "537"
-    t538 538 "538"
-    t539 539 "539"
-    t540 540 "540"
-    t541 541 "541"
-    t542 542 "542"
-    t543 543 "543"
-    t544 544 "544"
-    t545 545 "545"
-    t546 546 "546"
-    t547 547 "547"
-    t548 548 "548"
-    t549 549 "549"
-    t550 550 "550"
-    t551 551 "551"
-    t552 552 "552"
-    t553 553 "553"
-    t554 554 "554"
-    t555 555 "555"
-    t556 556 "556"
-    t557 557 "557"
-    t558 558 "558"
-    t559 559 "559"
-    t560 560 "560"
-    t561 561 "561"
-    t562 562 "562"
-    t563 563 "563"
-    t564 564 "564"
-    t565 565 "565"
-    t566 566 "566"
-    t567 567 "567"
-    t568 568 "568"
-    t569 569 "569"
-    t570 570 "570"
-    t571 571 "571"
-    t572 572 "572"
-    t573 573 "573"
-    t574 574 "574"
-    t575 575 "575"
-    t576 576 "576"
-    t577 577 "577"
-    t578 578 "578"
-    t579 579 "579"
-    t580 580 "580"
-    t581 581 "581"
-    t582 582 "582"
-    t583 583 "583"
-    t584 584 "584"
-    t585 585 "585"
-    t586 586 "586"
-    t587 587 "587"
-    t588 588 "588"
-    t589 589 "589"
-    t590 590 "590"
-    t591 591 "591"
-    t592 592 "592"
-    t593 593 "593"
-    t594 594 "594"
-    t595 595 "595"
-    t596 596 "596"
-    t597 597 "597"
-    t598 598 "598"
-    t599 599 "599"
-    t600 600 "600"
-    t601 601 "601"
-    t602 602 "602"
-    t603 603 "603"
-    t604 604 "604"
-    t605 605 "605"
-    t606 606 "606"
-    t607 607 "607"
-    t608 608 "608"
-    t609 609 "609"
-    t610 610 "610"
-    t611 611 "611"
-    t612 612 "612"
-    t613 613 "613"
-    t614 614 "614"
-    t615 615 "615"
-    t616 616 "616"
-    t617 617 "617"
-    t618 618 "618"
-    t619 619 "619"
-    t620 620 "620"
-    t621 621 "621"
-    t622 622 "622"
-    t623 623 "623"
-    t624 624 "624"
-    t625 625 "625"
-    t626 626 "626"
-    t627 627 "627"
-    t628 628 "628"
-    t629 629 "629"
-    t630 630 "630"
-    t631 631 "631"
-    t632 632 "632"
-    t633 633 "633"
-    t634 634 "634"
-    t635 635 "635"
-    t636 636 "636"
-    t637 637 "637"
-    t638 638 "638"
-    t639 639 "639"
-    t640 640 "640"
-    t641 641 "641"
-    t642 642 "642"
-    t643 643 "643"
-    t644 644 "644"
-    t645 645 "645"
-    t646 646 "646"
-    t647 647 "647"
-    t648 648 "648"
-    t649 649 "649"
-    t650 650 "650"
-    t651 651 "651"
-    t652 652 "652"
-    t653 653 "653"
-    t654 654 "654"
-    t655 655 "655"
-    t656 656 "656"
-    t657 657 "657"
-    t658 658 "658"
-    t659 659 "659"
-    t660 660 "660"
-    t661 661 "661"
-    t662 662 "662"
-    t663 663 "663"
-    t664 664 "664"
-    t665 665 "665"
-    t666 666 "666"
-    t667 667 "667"
-    t668 668 "668"
-    t669 669 "669"
-    t670 670 "670"
-    t671 671 "671"
-    t672 672 "672"
-    t673 673 "673"
-    t674 674 "674"
-    t675 675 "675"
-    t676 676 "676"
-    t677 677 "677"
-    t678 678 "678"
-    t679 679 "679"
-    t680 680 "680"
-    t681 681 "681"
-    t682 682 "682"
-    t683 683 "683"
-    t684 684 "684"
-    t685 685 "685"
-    t686 686 "686"
-    t687 687 "687"
-    t688 688 "688"
-    t689 689 "689"
-    t690 690 "690"
-    t691 691 "691"
-    t692 692 "692"
-    t693 693 "693"
-    t694 694 "694"
-    t695 695 "695"
-    t696 696 "696"
-    t697 697 "697"
-    t698 698 "698"
-    t699 699 "699"
-    t700 700 "700"
-    t701 701 "701"
-    t702 702 "702"
-    t703 703 "703"
-    t704 704 "704"
-    t705 705 "705"
-    t706 706 "706"
-    t707 707 "707"
-    t708 708 "708"
-    t709 709 "709"
-    t710 710 "710"
-    t711 711 "711"
-    t712 712 "712"
-    t713 713 "713"
-    t714 714 "714"
-    t715 715 "715"
-    t716 716 "716"
-    t717 717 "717"
-    t718 718 "718"
-    t719 719 "719"
-    t720 720 "720"
-    t721 721 "721"
-    t722 722 "722"
-    t723 723 "723"
-    t724 724 "724"
-    t725 725 "725"
-    t726 726 "726"
-    t727 727 "727"
-    t728 728 "728"
-    t729 729 "729"
-    t730 730 "730"
-    t731 731 "731"
-    t732 732 "732"
-    t733 733 "733"
-    t734 734 "734"
-    t735 735 "735"
-    t736 736 "736"
-    t737 737 "737"
-    t738 738 "738"
-    t739 739 "739"
-    t740 740 "740"
-    t741 741 "741"
-    t742 742 "742"
-    t743 743 "743"
-    t744 744 "744"
-    t745 745 "745"
-    t746 746 "746"
-    t747 747 "747"
-    t748 748 "748"
-    t749 749 "749"
-    t750 750 "750"
-    t751 751 "751"
-    t752 752 "752"
-    t753 753 "753"
-    t754 754 "754"
-    t755 755 "755"
-    t756 756 "756"
-    t757 757 "757"
-    t758 758 "758"
-    t759 759 "759"
-    t760 760 "760"
-    t761 761 "761"
-    t762 762 "762"
-    t763 763 "763"
-    t764 764 "764"
-    t765 765 "765"
-    t766 766 "766"
-    t767 767 "767"
-    t768 768 "768"
-    t769 769 "769"
-    t770 770 "770"
-    t771 771 "771"
-    t772 772 "772"
-    t773 773 "773"
-    t774 774 "774"
-    t775 775 "775"
-    t776 776 "776"
-    t777 777 "777"
-    t778 778 "778"
-    t779 779 "779"
-    t780 780 "780"
-    t781 781 "781"
-    t782 782 "782"
-    t783 783 "783"
-    t784 784 "784"
-    t785 785 "785"
-    t786 786 "786"
-    t787 787 "787"
-    t788 788 "788"
-    t789 789 "789"
-    t790 790 "790"
-    t791 791 "791"
-    t792 792 "792"
-    t793 793 "793"
-    t794 794 "794"
-    t795 795 "795"
-    t796 796 "796"
-    t797 797 "797"
-    t798 798 "798"
-    t799 799 "799"
-    t800 800 "800"
-    t801 801 "801"
-    t802 802 "802"
-    t803 803 "803"
-    t804 804 "804"
-    t805 805 "805"
-    t806 806 "806"
-    t807 807 "807"
-    t808 808 "808"
-    t809 809 "809"
-    t810 810 "810"
-    t811 811 "811"
-    t812 812 "812"
-    t813 813 "813"
-    t814 814 "814"
-    t815 815 "815"
-    t816 816 "816"
-    t817 817 "817"
-    t818 818 "818"
-    t819 819 "819"
-    t820 820 "820"
-    t821 821 "821"
-    t822 822 "822"
-    t823 823 "823"
-    t824 824 "824"
-    t825 825 "825"
-    t826 826 "826"
-    t827 827 "827"
-    t828 828 "828"
-    t829 829 "829"
-    t830 830 "830"
-    t831 831 "831"
-    t832 832 "832"
-    t833 833 "833"
-    t834 834 "834"
-    t835 835 "835"
-    t836 836 "836"
-    t837 837 "837"
-    t838 838 "838"
-    t839 839 "839"
-    t840 840 "840"
-    t841 841 "841"
-    t842 842 "842"
-    t843 843 "843"
-    t844 844 "844"
-    t845 845 "845"
-    t846 846 "846"
-    t847 847 "847"
-    t848 848 "848"
-    t849 849 "849"
-    t850 850 "850"
-    t851 851 "851"
-    t852 852 "852"
-    t853 853 "853"
-    t854 854 "854"
-    t855 855 "855"
-    t856 856 "856"
-    t857 857 "857"
-    t858 858 "858"
-    t859 859 "859"
-    t860 860 "860"
-    t861 861 "861"
-    t862 862 "862"
-    t863 863 "863"
-    t864 864 "864"
-    t865 865 "865"
-    t866 866 "866"
-    t867 867 "867"
-    t868 868 "868"
-    t869 869 "869"
-    t870 870 "870"
-    t871 871 "871"
-    t872 872 "872"
-    t873 873 "873"
-    t874 874 "874"
-    t875 875 "875"
-    t876 876 "876"
-    t877 877 "877"
-    t878 878 "878"
-    t879 879 "879"
-    t880 880 "880"
-    t881 881 "881"
-    t882 882 "882"
-    t883 883 "883"
-    t884 884 "884"
-    t885 885 "885"
-    t886 886 "886"
-    t887 887 "887"
-    t888 888 "888"
-    t889 889 "889"
-    t890 890 "890"
-    t891 891 "891"
-    t892 892 "892"
-    t893 893 "893"
-    t894 894 "894"
-    t895 895 "895"
-    t896 896 "896"
-    t897 897 "897"
-    t898 898 "898"
-    t899 899 "899"
-    t900 900 "900"
-    t901 901 "901"
-    t902 902 "902"
-    t903 903 "903"
-    t904 904 "904"
-    t905 905 "905"
-    t906 906 "906"
-    t907 907 "907"
-    t908 908 "908"
-    t909 909 "909"
-    t910 910 "910"
-    t911 911 "911"
-    t912 912 "912"
-    t913 913 "913"
-    t914 914 "914"
-    t915 915 "915"
-    t916 916 "916"
-    t917 917 "917"
-    t918 918 "918"
-    t919 919 "919"
-    t920 920 "920"
-    t921 921 "921"
-    t922 922 "922"
-    t923 923 "923"
-    t924 924 "924"
-    t925 925 "925"
-    t926 926 "926"
-    t927 927 "927"
-    t928 928 "928"
-    t929 929 "929"
-    t930 930 "930"
-    t931 931 "931"
-    t932 932 "932"
-    t933 933 "933"
-    t934 934 "934"
-    t935 935 "935"
-    t936 936 "936"
-    t937 937 "937"
-    t938 938 "938"
-    t939 939 "939"
-    t940 940 "940"
-    t941 941 "941"
-    t942 942 "942"
-    t943 943 "943"
-    t944 944 "944"
-    t945 945 "945"
-    t946 946 "946"
-    t947 947 "947"
-    t948 948 "948"
-    t949 949 "949"
-    t950 950 "950"
-    t951 951 "951"
-    t952 952 "952"
-    t953 953 "953"
-    t954 954 "954"
-    t955 955 "955"
-    t956 956 "956"
-    t957 957 "957"
-    t958 958 "958"
-    t959 959 "959"
-    t960 960 "960"
-    t961 961 "961"
-    t962 962 "962"
-    t963 963 "963"
-    t964 964 "964"
-    t965 965 "965"
-    t966 966 "966"
-    t967 967 "967"
-    t968 968 "968"
-    t969 969 "969"
-    t970 970 "970"
-    t971 971 "971"
-    t972 972 "972"
-    t973 973 "973"
-    t974 974 "974"
-    t975 975 "975"
-    t976 976 "976"
-    t977 977 "977"
-    t978 978 "978"
-    t979 979 "979"
-    t980 980 "980"
-    t981 981 "981"
-    t982 982 "982"
-    t983 983 "983"
-    t984 984 "984"
-    t985 985 "985"
-    t986 986 "986"
-    t987 987 "987"
-    t988 988 "988"
-    t989 989 "989"
-    t990 990 "990"
-    t991 991 "991"
-    t992 992 "992"
-    t993 993 "993"
-    t994 994 "994"
-    t995 995 "995"
-    t996 996 "996"
-    t997 997 "997"
-    t998 998 "998"
-    t999 999 "999"
-    t1000 1000 "1000"
-
-%%
-exp: "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-  "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-  "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-  "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-  "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-  "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-  "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-  "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-  "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-  "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-  "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-  "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-  "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-  "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-  "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
-  "197" "198" "199" "200" "201" "202" "203" "204" "205" "206" "207" "208"
-  "209" "210" "211" "212" "213" "214" "215" "216" "217" "218" "219" "220"
-  "221" "222" "223" "224" "225" "226" "227" "228" "229" "230" "231" "232"
-  "233" "234" "235" "236" "237" "238" "239" "240" "241" "242" "243" "244"
-  "245" "246" "247" "248" "249" "250" "251" "252" "253" "254" "255" "256"
-  "257" "258" "259" "260" "261" "262" "263" "264" "265" "266" "267" "268"
-  "269" "270" "271" "272" "273" "274" "275" "276" "277" "278" "279" "280"
-  "281" "282" "283" "284" "285" "286" "287" "288" "289" "290" "291" "292"
-  "293" "294" "295" "296" "297" "298" "299" "300" "301" "302" "303" "304"
-  "305" "306" "307" "308" "309" "310" "311" "312" "313" "314" "315" "316"
-  "317" "318" "319" "320" "321" "322" "323" "324" "325" "326" "327" "328"
-  "329" "330" "331" "332" "333" "334" "335" "336" "337" "338" "339" "340"
-  "341" "342" "343" "344" "345" "346" "347" "348" "349" "350" "351" "352"
-  "353" "354" "355" "356" "357" "358" "359" "360" "361" "362" "363" "364"
-  "365" "366" "367" "368" "369" "370" "371" "372" "373" "374" "375" "376"
-  "377" "378" "379" "380" "381" "382" "383" "384" "385" "386" "387" "388"
-  "389" "390" "391" "392" "393" "394" "395" "396" "397" "398" "399" "400"
-  "401" "402" "403" "404" "405" "406" "407" "408" "409" "410" "411" "412"
-  "413" "414" "415" "416" "417" "418" "419" "420" "421" "422" "423" "424"
-  "425" "426" "427" "428" "429" "430" "431" "432" "433" "434" "435" "436"
-  "437" "438" "439" "440" "441" "442" "443" "444" "445" "446" "447" "448"
-  "449" "450" "451" "452" "453" "454" "455" "456" "457" "458" "459" "460"
-  "461" "462" "463" "464" "465" "466" "467" "468" "469" "470" "471" "472"
-  "473" "474" "475" "476" "477" "478" "479" "480" "481" "482" "483" "484"
-  "485" "486" "487" "488" "489" "490" "491" "492" "493" "494" "495" "496"
-  "497" "498" "499" "500" "501" "502" "503" "504" "505" "506" "507" "508"
-  "509" "510" "511" "512" "513" "514" "515" "516" "517" "518" "519" "520"
-  "521" "522" "523" "524" "525" "526" "527" "528" "529" "530" "531" "532"
-  "533" "534" "535" "536" "537" "538" "539" "540" "541" "542" "543" "544"
-  "545" "546" "547" "548" "549" "550" "551" "552" "553" "554" "555" "556"
-  "557" "558" "559" "560" "561" "562" "563" "564" "565" "566" "567" "568"
-  "569" "570" "571" "572" "573" "574" "575" "576" "577" "578" "579" "580"
-  "581" "582" "583" "584" "585" "586" "587" "588" "589" "590" "591" "592"
-  "593" "594" "595" "596" "597" "598" "599" "600" "601" "602" "603" "604"
-  "605" "606" "607" "608" "609" "610" "611" "612" "613" "614" "615" "616"
-  "617" "618" "619" "620" "621" "622" "623" "624" "625" "626" "627" "628"
-  "629" "630" "631" "632" "633" "634" "635" "636" "637" "638" "639" "640"
-  "641" "642" "643" "644" "645" "646" "647" "648" "649" "650" "651" "652"
-  "653" "654" "655" "656" "657" "658" "659" "660" "661" "662" "663" "664"
-  "665" "666" "667" "668" "669" "670" "671" "672" "673" "674" "675" "676"
-  "677" "678" "679" "680" "681" "682" "683" "684" "685" "686" "687" "688"
-  "689" "690" "691" "692" "693" "694" "695" "696" "697" "698" "699" "700"
-  "701" "702" "703" "704" "705" "706" "707" "708" "709" "710" "711" "712"
-  "713" "714" "715" "716" "717" "718" "719" "720" "721" "722" "723" "724"
-  "725" "726" "727" "728" "729" "730" "731" "732" "733" "734" "735" "736"
-  "737" "738" "739" "740" "741" "742" "743" "744" "745" "746" "747" "748"
-  "749" "750" "751" "752" "753" "754" "755" "756" "757" "758" "759" "760"
-  "761" "762" "763" "764" "765" "766" "767" "768" "769" "770" "771" "772"
-  "773" "774" "775" "776" "777" "778" "779" "780" "781" "782" "783" "784"
-  "785" "786" "787" "788" "789" "790" "791" "792" "793" "794" "795" "796"
-  "797" "798" "799" "800" "801" "802" "803" "804" "805" "806" "807" "808"
-  "809" "810" "811" "812" "813" "814" "815" "816" "817" "818" "819" "820"
-  "821" "822" "823" "824" "825" "826" "827" "828" "829" "830" "831" "832"
-  "833" "834" "835" "836" "837" "838" "839" "840" "841" "842" "843" "844"
-  "845" "846" "847" "848" "849" "850" "851" "852" "853" "854" "855" "856"
-  "857" "858" "859" "860" "861" "862" "863" "864" "865" "866" "867" "868"
-  "869" "870" "871" "872" "873" "874" "875" "876" "877" "878" "879" "880"
-  "881" "882" "883" "884" "885" "886" "887" "888" "889" "890" "891" "892"
-  "893" "894" "895" "896" "897" "898" "899" "900" "901" "902" "903" "904"
-  "905" "906" "907" "908" "909" "910" "911" "912" "913" "914" "915" "916"
-  "917" "918" "919" "920" "921" "922" "923" "924" "925" "926" "927" "928"
-  "929" "930" "931" "932" "933" "934" "935" "936" "937" "938" "939" "940"
-  "941" "942" "943" "944" "945" "946" "947" "948" "949" "950" "951" "952"
-  "953" "954" "955" "956" "957" "958" "959" "960" "961" "962" "963" "964"
-  "965" "966" "967" "968" "969" "970" "971" "972" "973" "974" "975" "976"
-  "977" "978" "979" "980" "981" "982" "983" "984" "985" "986" "987" "988"
-  "989" "990" "991" "992" "993" "994" "995" "996" "997" "998" "999" "1000"
-  ;
-%%
-#include <assert.h>
-
-
-
-
-/* A C error reporting function.  */
-/* !POSIX */ static
-void yyerror (const char *msg)
-{
-  fprintf (stderr, "%s\n", msg);
-}
-static int
-yylex (void)
-{
-  static int counter = 1;
-  if (counter <= MAX)
-    return counter++;
-  assert (counter++ == MAX + 1);
-  return 0;
-}
-#include <stdlib.h> /* getenv. */
-#include <string.h> /* strcmp. */
-int
-main (int argc, char const* argv[])
-{
-  (void) argc;
-  (void) argv;
-  return yyparse ();
-}
-./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+ skipped (calc.at:1551)
+598. calc.at:1554: testing Calculator Java api.push-pull=both  ...
+./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -221788,9 +220648,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+
 ./calc.at:1492: cat stderr
 ./calc.at:1492:  $PREPARSER ./calc  /dev/null
 stderr:
+597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is}  ...
+./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 Starting parse
 Entering state 0
 Reading a token
@@ -221815,17 +220678,13 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1492: cat stderr
-607. torture.at:271: testing State number type: 129 states ...
-./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr	2023-05-18 03:35:15.708153118 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found
+598. calc.at:1554: ./calc.at:1492: cat stderr
+ skipped (calc.at:1554)
 input:
-607. torture.at:271:   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1492:  $PREPARSER ./calc  input
- skipped (torture.at:271)
+599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations  ...
+./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 stderr:
 Starting parse
 Entering state 0
@@ -222074,9 +220933,11 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+597. calc.at:1552: 
+ skipped (calc.at:1552)
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
 stderr:
+
 Starting parse
 Entering state 0
 Reading a token
@@ -222335,6 +221196,9 @@
   }eg
 ' expout || exit 77
 ./calc.at:1492: cat stderr
+599. calc.at:1555:  skipped (calc.at:1555)
+600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both  ...
+./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1492:  $PREPARSER ./calc  input
@@ -222451,6 +221315,7 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
 stderr:
 Starting parse
 Entering state 0
@@ -222563,6 +221428,8 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both  ...
+./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -222573,18 +221440,17 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1492: cat stderr
-608. torture.at:272: testing State number type: 256 states ...
-./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77
+600. calc.at:1556: ./calc.at:1492: cat stderr
+ skipped (calc.at:1556)
 input:
+601. calc.at:1557: 602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full  ...
+./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
   | (- *) + (1 2) = 1
 ./calc.at:1492:  $PREPARSER ./calc  input
+
+ skipped (calc.at:1557)
 stderr:
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr	2023-05-18 03:35:16.544160852 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found
-608. torture.at:272: Starting parse
+Starting parse
 Entering state 0
 Reading a token
 Next token is token '(' (1.1: )
@@ -222701,9 +221567,8 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
- skipped (torture.at:272)
-stderr:
 
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -222831,7 +221696,10 @@
   }eg
 ' expout || exit 77
 ./calc.at:1492: cat stderr
+602. calc.at:1560:  skipped (calc.at:1560)
 input:
+603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full  ...
+./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y
   | (* *) + (*) + (*)
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
@@ -222951,6 +221819,9 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+604. torture.at:132: testing Big triangle ...
+./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77
+
 stderr:
 Starting parse
 Entering state 0
@@ -223082,7 +221953,7 @@
   | 1 + 2 * 3 + !+ ++
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
-Starting parse
+603. calc.at:1561: Starting parse
 Entering state 0
 Reading a token
 Next token is token "number" (1.1: 1)
@@ -223147,9 +222018,10 @@
    $2 = token '+' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+ skipped (calc.at:1561)
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stdout:
+
 Starting parse
 Entering state 0
 Reading a token
@@ -223215,6 +222087,13 @@
    $2 = token '+' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+input:
+605. torture.at:216: testing Big horizontal ...
+  | 1 + 2 * 3 + !- ++
+./calc.at:1492:  $PREPARSER ./calc  input
+./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77
+stdout:
+stderr:
 %code top { /* -*- c -*- */
 /* Adjust to the compiler.
   We used to do it here, but each time we add a new line,
@@ -223228,3303 +222107,13 @@
 %{
 #include <stdio.h>
 #include <stdlib.h>
-#include <assert.h>
-#define MAX 200
+#define MAX 1000
 static int yylex (void);
 #include <stdio.h>
 
 /* !POSIX */ static void yyerror (const char *msg);
 %}
-%union
-{
-  int val;
-};
-
-%token END "end"
-%type <val> exp input
-%token t1 1 "1"
-%token t2 2 "2"
-%token t3 3 "3"
-%token t4 4 "4"
-%token t5 5 "5"
-%token t6 6 "6"
-%token t7 7 "7"
-%token t8 8 "8"
-%token t9 9 "9"
-%token t10 10 "10"
-%token t11 11 "11"
-%token t12 12 "12"
-%token t13 13 "13"
-%token t14 14 "14"
-%token t15 15 "15"
-%token t16 16 "16"
-%token t17 17 "17"
-%token t18 18 "18"
-%token t19 19 "19"
-%token t20 20 "20"
-%token t21 21 "21"
-%token t22 22 "22"
-%token t23 23 "23"
-%token t24 24 "24"
-%token t25 25 "25"
-%token t26 26 "26"
-%token t27 27 "27"
-%token t28 28 "28"
-%token t29 29 "29"
-%token t30 30 "30"
-%token t31 31 "31"
-%token t32 32 "32"
-%token t33 33 "33"
-%token t34 34 "34"
-%token t35 35 "35"
-%token t36 36 "36"
-%token t37 37 "37"
-%token t38 38 "38"
-%token t39 39 "39"
-%token t40 40 "40"
-%token t41 41 "41"
-%token t42 42 "42"
-%token t43 43 "43"
-%token t44 44 "44"
-%token t45 45 "45"
-%token t46 46 "46"
-%token t47 47 "47"
-%token t48 48 "48"
-%token t49 49 "49"
-%token t50 50 "50"
-%token t51 51 "51"
-%token t52 52 "52"
-%token t53 53 "53"
-%token t54 54 "54"
-%token t55 55 "55"
-%token t56 56 "56"
-%token t57 57 "57"
-%token t58 58 "58"
-%token t59 59 "59"
-%token t60 60 "60"
-%token t61 61 "61"
-%token t62 62 "62"
-%token t63 63 "63"
-%token t64 64 "64"
-%token t65 65 "65"
-%token t66 66 "66"
-%token t67 67 "67"
-%token t68 68 "68"
-%token t69 69 "69"
-%token t70 70 "70"
-%token t71 71 "71"
-%token t72 72 "72"
-%token t73 73 "73"
-%token t74 74 "74"
-%token t75 75 "75"
-%token t76 76 "76"
-%token t77 77 "77"
-%token t78 78 "78"
-%token t79 79 "79"
-%token t80 80 "80"
-%token t81 81 "81"
-%token t82 82 "82"
-%token t83 83 "83"
-%token t84 84 "84"
-%token t85 85 "85"
-%token t86 86 "86"
-%token t87 87 "87"
-%token t88 88 "88"
-%token t89 89 "89"
-%token t90 90 "90"
-%token t91 91 "91"
-%token t92 92 "92"
-%token t93 93 "93"
-%token t94 94 "94"
-%token t95 95 "95"
-%token t96 96 "96"
-%token t97 97 "97"
-%token t98 98 "98"
-%token t99 99 "99"
-%token t100 100 "100"
-%token t101 101 "101"
-%token t102 102 "102"
-%token t103 103 "103"
-%token t104 104 "104"
-%token t105 105 "105"
-%token t106 106 "106"
-%token t107 107 "107"
-%token t108 108 "108"
-%token t109 109 "109"
-%token t110 110 "110"
-%token t111 111 "111"
-%token t112 112 "112"
-%token t113 113 "113"
-%token t114 114 "114"
-%token t115 115 "115"
-%token t116 116 "116"
-%token t117 117 "117"
-%token t118 118 "118"
-%token t119 119 "119"
-%token t120 120 "120"
-%token t121 121 "121"
-%token t122 122 "122"
-%token t123 123 "123"
-%token t124 124 "124"
-%token t125 125 "125"
-%token t126 126 "126"
-%token t127 127 "127"
-%token t128 128 "128"
-%token t129 129 "129"
-%token t130 130 "130"
-%token t131 131 "131"
-%token t132 132 "132"
-%token t133 133 "133"
-%token t134 134 "134"
-%token t135 135 "135"
-%token t136 136 "136"
-%token t137 137 "137"
-%token t138 138 "138"
-%token t139 139 "139"
-%token t140 140 "140"
-%token t141 141 "141"
-%token t142 142 "142"
-%token t143 143 "143"
-%token t144 144 "144"
-%token t145 145 "145"
-%token t146 146 "146"
-%token t147 147 "147"
-%token t148 148 "148"
-%token t149 149 "149"
-%token t150 150 "150"
-%token t151 151 "151"
-%token t152 152 "152"
-%token t153 153 "153"
-%token t154 154 "154"
-%token t155 155 "155"
-%token t156 156 "156"
-%token t157 157 "157"
-%token t158 158 "158"
-%token t159 159 "159"
-%token t160 160 "160"
-%token t161 161 "161"
-%token t162 162 "162"
-%token t163 163 "163"
-%token t164 164 "164"
-%token t165 165 "165"
-%token t166 166 "166"
-%token t167 167 "167"
-%token t168 168 "168"
-%token t169 169 "169"
-%token t170 170 "170"
-%token t171 171 "171"
-%token t172 172 "172"
-%token t173 173 "173"
-%token t174 174 "174"
-%token t175 175 "175"
-%token t176 176 "176"
-%token t177 177 "177"
-%token t178 178 "178"
-%token t179 179 "179"
-%token t180 180 "180"
-%token t181 181 "181"
-%token t182 182 "182"
-%token t183 183 "183"
-%token t184 184 "184"
-%token t185 185 "185"
-%token t186 186 "186"
-%token t187 187 "187"
-%token t188 188 "188"
-%token t189 189 "189"
-%token t190 190 "190"
-%token t191 191 "191"
-%token t192 192 "192"
-%token t193 193 "193"
-%token t194 194 "194"
-%token t195 195 "195"
-%token t196 196 "196"
-%token t197 197 "197"
-%token t198 198 "198"
-%token t199 199 "199"
-%token t200 200 "200"
-%%
-input:
-  exp        { assert ($1 == 0); $$ = $1; }
-| input exp  { assert ($2 == $1 + 1); $$ = $2; }
-;
 
-exp:
-  END
-    { $$ = 0; }
-| "1"  END 
-    { $$ = 1; }
-| "1" "2"  END 
-    { $$ = 2; }
-| "1" "2" "3"  END 
-    { $$ = 3; }
-| "1" "2" "3" "4"  END 
-    { $$ = 4; }
-| "1" "2" "3" "4" "5"  END 
-    { $$ = 5; }
-| "1" "2" "3" "4" "5" "6"  END 
-    { $$ = 6; }
-| "1" "2" "3" "4" "5" "6" "7"  END 
-    { $$ = 7; }
-| "1" "2" "3" "4" "5" "6" "7" "8"  END 
-    { $$ = 8; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9"  END 
-    { $$ = 9; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10"  END 
-    { $$ = 10; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11"  END 
-    { $$ = 11; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12"  END 
-    { $$ = 12; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13"  END 
-    { $$ = 13; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14"	END 
-    { $$ = 14; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15"  END 
-    { $$ = 15; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" 
-   END 
-    { $$ = 16; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17"  END 
-    { $$ = 17; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18"  END 
-    { $$ = 18; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19"  END 
-    { $$ = 19; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20"	END 
-    { $$ = 20; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21"  END 
-    { $$ = 21; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22"  END 
-    { $$ = 22; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23"  END 
-    { $$ = 23; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24"  END 
-    { $$ = 24; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25"  END 
-    { $$ = 25; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26"  END 
-    { $$ = 26; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27"  END 
-    { $$ = 27; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28"	END 
-    { $$ = 28; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29"  END 
-    { $$ = 29; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" 
-   END 
-    { $$ = 30; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31"  END 
-    { $$ = 31; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32"  END 
-    { $$ = 32; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33"  END 
-    { $$ = 33; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34"	END 
-    { $$ = 34; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35"  END 
-    { $$ = 35; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36"  END 
-    { $$ = 36; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37"  END 
-    { $$ = 37; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38"  END 
-    { $$ = 38; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39"  END 
-    { $$ = 39; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40"  END 
-    { $$ = 40; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41"  END 
-    { $$ = 41; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42"	END 
-    { $$ = 42; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43"  END 
-    { $$ = 43; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" 
-   END 
-    { $$ = 44; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45"  END 
-    { $$ = 45; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46"  END 
-    { $$ = 46; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47"  END 
-    { $$ = 47; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48"	END 
-    { $$ = 48; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49"  END 
-    { $$ = 49; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50"  END 
-    { $$ = 50; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51"  END 
-    { $$ = 51; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52"  END 
-    { $$ = 52; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53"  END 
-    { $$ = 53; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54"  END 
-    { $$ = 54; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55"  END 
-    { $$ = 55; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56"	END 
-    { $$ = 56; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57"  END 
-    { $$ = 57; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" 
-   END 
-    { $$ = 58; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59"  END 
-    { $$ = 59; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60"  END 
-    { $$ = 60; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61"  END 
-    { $$ = 61; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62"	END 
-    { $$ = 62; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63"  END 
-    { $$ = 63; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64"  END 
-    { $$ = 64; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65"  END 
-    { $$ = 65; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66"  END 
-    { $$ = 66; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67"  END 
-    { $$ = 67; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68"  END 
-    { $$ = 68; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69"  END 
-    { $$ = 69; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70"	END 
-    { $$ = 70; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71"  END 
-    { $$ = 71; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" 
-   END 
-    { $$ = 72; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73"  END 
-    { $$ = 73; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74"  END 
-    { $$ = 74; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75"  END 
-    { $$ = 75; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76"	END 
-    { $$ = 76; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77"  END 
-    { $$ = 77; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78"  END 
-    { $$ = 78; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79"  END 
-    { $$ = 79; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80"  END 
-    { $$ = 80; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81"  END 
-    { $$ = 81; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82"  END 
-    { $$ = 82; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83"  END 
-    { $$ = 83; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84"	END 
-    { $$ = 84; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85"  END 
-    { $$ = 85; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" 
-   END 
-    { $$ = 86; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87"  END 
-    { $$ = 87; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88"  END 
-    { $$ = 88; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89"  END 
-    { $$ = 89; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90"	END 
-    { $$ = 90; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91"  END 
-    { $$ = 91; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92"  END 
-    { $$ = 92; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93"  END 
-    { $$ = 93; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94"  END 
-    { $$ = 94; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95"  END 
-    { $$ = 95; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96"  END 
-    { $$ = 96; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97"  END 
-    { $$ = 97; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98"	END 
-    { $$ = 98; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99"  END 
-    { $$ = 99; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" 
-   END 
-    { $$ = 100; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101"  END 
-    { $$ = 101; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102"	END 
-    { $$ = 102; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103"  END 
-    { $$ = 103; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104"  END 
-    { $$ = 104; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105"  END 
-    { $$ = 105; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106"	END 
-    { $$ = 106; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107"  END 
-    { $$ = 107; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108"  END 
-    { $$ = 108; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109"  END 
-    { $$ = 109; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110"	END 
-    { $$ = 110; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111"  END 
-    { $$ = 111; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" 
-   END 
-    { $$ = 112; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113"  END 
-    { $$ = 113; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114"	END 
-    { $$ = 114; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115"  END 
-    { $$ = 115; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116"  END 
-    { $$ = 116; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117"  END 
-    { $$ = 117; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118"	END 
-    { $$ = 118; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119"  END 
-    { $$ = 119; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120"  END 
-    { $$ = 120; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121"  END 
-    { $$ = 121; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122"	END 
-    { $$ = 122; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123"  END 
-    { $$ = 123; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" 
-   END 
-    { $$ = 124; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125"  END 
-    { $$ = 125; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126"	END 
-    { $$ = 126; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127"  END 
-    { $$ = 127; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128"  END 
-    { $$ = 128; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129"  END 
-    { $$ = 129; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130"	END 
-    { $$ = 130; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131"  END 
-    { $$ = 131; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132"  END 
-    { $$ = 132; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133"  END 
-    { $$ = 133; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134"	END 
-    { $$ = 134; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135"  END 
-    { $$ = 135; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" 
-   END 
-    { $$ = 136; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137"  END 
-    { $$ = 137; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138"	END 
-    { $$ = 138; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139"  END 
-    { $$ = 139; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140"  END 
-    { $$ = 140; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141"  END 
-    { $$ = 141; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142"	END 
-    { $$ = 142; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143"  END 
-    { $$ = 143; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144"  END 
-    { $$ = 144; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145"  END 
-    { $$ = 145; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146"	END 
-    { $$ = 146; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147"  END 
-    { $$ = 147; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "input:
-133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" 
-   END 
-    { $$ = 148; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149"  END 
-    { $$ = 149; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150"	END 
-    { $$ = 150; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151"  END 
-    { $$ = 151; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152"  END 
-    { $$ = 152; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153"  END 
-    { $$ = 153; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154"	END 
-    { $$ = 154; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155"  END 
-    { $$ = 155; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156"  END 
-    { $$ = 156; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157"  END 
-    { $$ = 157; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158"	END 
-    { $$ = 158; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159"  END 
-    { $$ = 159; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" 
-   END 
-    { $$ = 160; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161"  END 
-    { $$ = 161; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162"	END 
-    { $$ = 162; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163"  END 
-    { $$ = 163; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164"  END 
-    { $$ = 164; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165"  END 
-    { $$ = 165; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166"	END 
-    { $$ = 166; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167"  END 
-    { $$ = 167; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168"  END 
-    { $$ = 168; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169"  END 
-    { $$ = 169; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170"	END 
-    { $$ = 170; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171"  END 
-    { $$ = 171; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" 
-   END 
-    { $$ = 172; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173"  END 
-    { $$ = 173; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174"	END 
-    { $$ = 174; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175"  END 
-    { $$ = 175; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176"  END 
-    { $$ = 176; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177"  END 
-    { $$ = 177; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178"	END 
-    { $$ = 178; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179"  END 
-    { $$ = 179; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180"  END 
-    { $$ = 180; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181"  END 
-    { $$ = 181; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182"	END 
-    { $$ = 182; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183"  END 
-    { $$ = 183; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" 
-   END 
-    { $$ = 184; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185"  END 
-    { $$ = 185; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186"	END 
-    { $$ = 186; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187"  END 
-    { $$ = 187; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188"  END 
-    { $$ = 188; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189"  END 
-    { $$ = 189; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190"	END 
-    { $$ = 190; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191"  END 
-    { $$ = 191; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192"  END 
-    { $$ = 192; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192" "193"  END 
-    { $$ = 193; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194"	END 
-    { $$ = 194; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195"  END 
-    { $$ = 195; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" 
-   END 
-    { $$ = 196; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
-   "197"  END 
-    { $$ = 197; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
-   "197" "198"	END 
-    { $$ = 198; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
-   "197" "198" "199"  END 
-    { $$ = 199; }
-| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
-   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
-   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
-   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
-   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
-   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
-   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
-   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
-   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
-   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
-   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
-   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
-   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
-   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
-   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
-   "197" "198" "199" "200"  END 
-    { $$ = 200; }
-;
-%%
-
-
-
-
-/* A C error reporting function.  */
-/* !POSIX */ static
-void yyerror (const char *msg)
-{
-  fprintf (stderr, "%s\n", msg);
-}
-static int
-yylex (void)
-{
-  static int inner = 1;
-  static int outer = 0;
-  if (outer > MAX)
-    return 0;
-  else if (inner > outer)
-    {
-      inner = 1;
-      ++outer;
-      return END;
-    }
-  return inner++;
-}
-#include <stdlib.h> /* getenv. */
-#include <string.h> /* strcmp. */
-int
-main (int argc, char const* argv[])
-{
-  (void) argc;
-  (void) argv;
-  return yyparse ();
-}
-  | 1 + 2 * 3 + !- ++
-./calc.at:1492:  $PREPARSER ./calc  input
-./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-609. torture.at:273: testing State number type: 257 states ...
-./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr	2023-05-18 03:35:17.288167736 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found
-609. torture.at:273:  skipped (torture.at:273)
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1492: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1492:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1492: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1492:  $PREPARSER ./calc  input
-610. torture.at:274: testing State number type: 32768 states ...
-./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr	2023-05-18 03:35:18.052174804 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found
-610. torture.at:274: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
- skipped (torture.at:274)
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1492: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1492:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1492: cat stderr
-611. torture.at:275: testing State number type: 65536 states ...
-./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr	2023-05-18 03:35:18.872182391 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found
-input:
-611. torture.at:275:   | (1 + # + 1) = 1111
-./calc.at:1492:  $PREPARSER ./calc  input
- skipped (torture.at:275)
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1492: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1492:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1492: cat stderr
-567. calc.at:1492:  ok
-612. torture.at:276: testing State number type: 65537 states ...
-./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr	2023-05-18 03:35:19.640189496 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found
-612. torture.at:276:  skipped (torture.at:276)
-
-
-614. torture.at:485: testing Exploding the Stack Size with Alloca ...
-613. torture.at:385: testing Many lookahead tokens ...
-./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77
-./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-stdout:
-%define parse.error verbose
-%debug
-%{
-/* Adjust to the compiler.
-  We used to do it here, but each time we add a new line,
-  we have to adjust all the line numbers in error messages.
-  It's simpler to use a constant include to a varying file.  */
-#include <testsuite.h>
-
-# include <stdio.h>
-# include <stdlib.h>
-# include <assert.h>
-# define MAX 1000
-static int yylex (void);
-#include <stdio.h>
-
-/* !POSIX */ static void yyerror (const char *msg);
-%}
-%union
-{
-  int val;
-};
-
-%type <val> input exp
-%token token
-%type <val> n1 n2 n3 n4 n5 n6 n7 n8 n9 n10 n11 n12 n13 n14 n15 n16 n17 n18
-	    n19 n20 n21 n22 n23 n24 n25 n26 n27 n28 n29 n30 n31 n32 n33 n34
-	    n35 n36 n37 n38 n39 n40 n41 n42 n43 n44 n45 n46 n47 n48 n49 n50
-	    n51 n52 n53 n54 n55 n56 n57 n58 n59 n60 n61 n62 n63 n64 n65 n66
-	    n67 n68 n69 n70 n71 n72 n73 n74 n75 n76 n77 n78 n79 n80 n81 n82
-	    n83 n84 n85 n86 n87 n88 n89 n90 n91 n92 n93 n94 n95 n96 n97 n98
-	    n99 n100 n101 n102 n103 n104 n105 n106 n107 n108 n109 n110 n111
-	    n112 n113 n114 n115 n116 n117 n118 n119 n120 n121 n122 n123
-	    n124 n125 n126 n127 n128 n129 n130 n131 n132 n133 n134 n135
-	    n136 n137 n138 n139 n140 n141 n142 n143 n144 n145 n146 n147
-	    n148 n149 n150 n151 n152 n153 n154 n155 n156 n157 n158 n159
-	    n160 n161 n162 n163 n164 n165 n166 n167 n168 n169 n170 n171
-	    n172 n173 n174 n175 n176 n177 n178 n179 n180 n181 n182 n183
-	    n184 n185 n186 n187 n188 n189 n190 n191 n192 n193 n194 n195
-	    n196 n197 n198 n199 n200 n201 n202 n203 n204 n205 n206 n207
-	    n208 n209 n210 n211 n212 n213 n214 n215 n216 n217 n218 n219
-	    n220 n221 n222 n223 n224 n225 n226 n227 n228 n229 n230 n231
-	    n232 n233 n234 n235 n236 n237 n238 n239 n240 n241 n242 n243
-	    n244 n245 n246 n247 n248 n249 n250 n251 n252 n253 n254 n255
-	    n256 n257 n258 n259 n260 n261 n262 n263 n264 n265 n266 n267
-	    n268 n269 n270 n271 n272 n273 n274 n275 n276 n277 n278 n279
-	    n280 n281 n282 n283 n284 n285 n286 n287 n288 n289 n290 n291
-	    n292 n293 n294 n295 n296 n297 n298 n299 n300 n301 n302 n303
-	    n304 n305 n306 n307 n308 n309 n310 n311 n312 n313 n314 n315
-	    n316 n317 n318 n319 n320 n321 n322 n323 n324 n325 n326 n327
-	    n328 n329 n330 n331 n332 n333 n334 n335 n336 n337 n338 n339
-	    n340 n341 n342 n343 n344 n345 n346 n347 n348 n349 n350 n351
-	    n352 n353 n354 n355 n356 n357 n358 n359 n360 n361 n362 n363
-	    n364 n365 n366 n367 n368 n369 n370 n371 n372 n373 n374 n375
-	    n376 n377 n378 n379 n380 n381 n382 n383 n384 n385 n386 n387
-	    n388 n389 n390 n391 n392 n393 n394 n395 n396 n397 n398 n399
-	    n400 n401 n402 n403 n404 n405 n406 n407 n408 n409 n410 n411
-	    n412 n413 n414 n415 n416 n417 n418 n419 n420 n421 n422 n423
-	    n424 n425 n426 n427 n428 n429 n430 n431 n432 n433 n434 n435
-	    n436 n437 n438 n439 n440 n441 n442 n443 n444 n445 n446 n447
-	    n448 n449 n450 n451 n452 n453 n454 n455 n456 n457 n458 n459
-	    n460 n461 n462 n463 n464 n465 n466 n467 n468 n469 n470 n471
-	    n472 n473 n474 n475 n476 n477 n478 n479 n480 n481 n482 n483
-	    n484 n485 n486 n487 n488 n489 n490 n491 n492 n493 n494 n495
-	    n496 n497 n498 n499 n500 n501 n502 n503 n504 n505 n506 n507
-	    n508 n509 n510 n511 n512 n513 n514 n515 n516 n517 n518 n519
-	    n520 n521 n522 n523 n524 n525 n526 n527 n528 n529 n530 n531
-	    n532 n533 n534 n535 n536 n537 n538 n539 n540 n541 n542 n543
-	    n544 n545 n546 n547 n548 n549 n550 n551 n552 n553 n554 n555
-	    n556 n557 n558 n559 n560 n561 n562 n563 n564 n565 n566 n567
-	    n568 n569 n570 n571 n572 n573 n574 n575 n576 n577 n578 n579
-	    n580 n581 n582 n583 n584 n585 n586 n587 n588 n589 n590 n591
-	    n592 n593 n594 n595 n596 n597 n598 n599 n600 n601 n602 n603
-	    n604 n605 n606 n607 n608 n609 n610 n611 n612 n613 n614 n615
-	    n616 n617 n618 n619 n620 n621 n622 n623 n624 n625 n626 n627
-	    n628 n629 n630 n631 n632 n633 n634 n635 n636 n637 n638 n639
-	    n640 n641 n642 n643 n644 n645 n646 n647 n648 n649 n650 n651
-	    n652 n653 n654 n655 n656 n657 n658 n659 n660 n661 n662 n663
-	    n664 n665 n666 n667 n668 n669 n670 n671 n672 n673 n674 n675
-	    n676 n677 n678 n679 n680 n681 n682 n683 n684 n685 n686 n687
-	    n688 n689 n690 n691 n692 n693 n694 n695 n696 n697 n698 n699
-	    n700 n701 n702 n703 n704 n705 n706 n707 n708 n709 n710 n711
-	    n712 n713 n714 n715 n716 n717 n718 n719 n720 n721 n722 n723
-	    n724 n725 n726 n727 n728 n729 n730 n731 n732 n733 n734 n735
-	    n736 n737 n738 n739 n740 n741 n742 n743 n744 n745 n746 n747
-	    n748 n749 n750 n751 n752 n753 n754 n755 n756 n757 n758 n759
-	    n760 n761 n762 n763 n764 n765 n766 n767 n768 n769 n770 n771
-	    n772 n773 n774 n775 n776 n777 n778 n779 n780 n781 n782 n783
-	    n784 n785 n786 n787 n788 n789 n790 n791 n792 n793 n794 n795
-	    n796 n797 n798 n799 n800 n801 n802 n803 n804 n805 n806 n807
-	    n808 n809 n810 n811 n812 n813 n814 n815 n816 n817 n818 n819
-	    n820 n821 n822 n823 n824 n825 n826 n827 n828 n829 n830 n831
-	    n832 n833 n834 n835 n836 n837 n838 n839 n840 n841 n842 n843
-	    n844 n845 n846 n847 n848 n849 n850 n851 n852 n853 n854 n855
-	    n856 n857 n858 n859 n860 n861 n862 n863 n864 n865 n866 n867
-	    n868 n869 n870 n871 n872 n873 n874 n875 n876 n877 n878 n879
-	    n880 n881 n882 n883 n884 n885 n886 n887 n888 n889 n890 n891
-	    n892 n893 n894 n895 n896 n897 n898 n899 n900 n901 n902 n903
-	    n904 n905 n906 n907 n908 n909 n910 n911 n912 n913 n914 n915
-	    n916 n917 n918 n919 n920 n921 n922 n923 n924 n925 n926 n927
-	    n928 n929 n930 n931 n932 n933 n934 n935 n936 n937 n938 n939
-	    n940 n941 n942 n943 n944 n945 n946 n947 n948 n949 n950 n951
-	    n952 n953 n954 n955 n956 n957 n958 n959 n960 n961 n962 n963
-	    n964 n965 n966 n967 n968 n969 n970 n971 n972 n973 n974 n975
-	    n976 n977 n978 n979 n980 n981 n982 n983 n984 n985 n986 n987
-	    n988 n989 n990 n991 n992 n993 n994 n995 n996 n997 n998 n999
-	    n1000
 %token
     t1 1 "1"
     t2 2 "2"
@@ -226788,2753 +222377,831 @@
     t260 260 "260"
     t261 261 "261"
     t262 262 "262"
-    t263 263 "263"
-    t264 264 "264"
-    t265 265 "265"
-    t266 266 "266"
-    t267 267 "267"
-    t268 268 "268"
-    t269 269 "269"
-    t270 270 "270"
-    t271 271 "271"
-    t272 272 "272"
-    t273 273 "273"
-    t274 274 "274"
-    t275 275 "275"
-    t276 276 "276"
-    t277 277 "277"
-    t278 278 "278"
-    t279 279 "279"
-    t280 280 "280"
-    t281 281 "281"
-    t282 282 "282"
-    t283 283 "283"
-    t284 284 "284"
-    t285 285 "285"
-    t286 286 "286"
-    t287 287 "287"
-    t288 288 "288"
-    t289 289 "289"
-    t290 290 "290"
-    t291 291 "291"
-    t292 292 "292"
-    t293 293 "293"
-    t294 294 "294"
-    t295 295 "295"
-    t296 296 "296"
-    t297 297 "297"
-    t298 298 "298"
-    t299 299 "299"
-    t300 300 "300"
-    t301 301 "301"
-    t302 302 "302"
-    t303 303 "303"
-    t304 304 "304"
-    t305 305 "305"
-    t306 306 "306"
-    t307 307 "307"
-    t308 308 "308"
-    t309 309 "309"
-    t310 310 "310"
-    t311 311 "311"
-    t312 312 "312"
-    t313 313 "313"
-    t314 314 "314"
-    t315 315 "315"
-    t316 316 "316"
-    t317 317 "317"
-    t318 318 "318"
-    t319 319 "319"
-    t320 320 "320"
-    t321 321 "321"
-    t322 322 "322"
-    t323 323 "323"
-    t324 324 "324"
-    t325 325 "325"
-    t326 326 "326"
-    t327 327 "327"
-    t328 328 "328"
-    t329 329 "329"
-    t330 330 "330"
-    t331 331 "331"
-    t332 332 "332"
-    t333 333 "333"
-    t334 334 "334"
-    t335 335 "335"
-    t336 336 "336"
-    t337 337 "337"
-    t338 338 "338"
-    t339 339 "339"
-    t340 340 "340"
-    t341 341 "341"
-    t342 342 "342"
-    t343 343 "343"
-    t344 344 "344"
-    t345 345 "345"
-    t346 346 "346"
-    t347 347 "347"
-    t348 348 "348"
-    t349 349 "349"
-    t350 350 "350"
-    t351 351 "351"
-    t352 352 "352"
-    t353 353 "353"
-    t354 354 "354"
-    t355 355 "355"
-    t356 356 "356"
-    t357 357 "357"
-    t358 358 "358"
-    t359 359 "359"
-    t360 360 "360"
-    t361 361 "361"
-    t362 362 "362"
-    t363 363 "363"
-    t364 364 "364"
-    t365 365 "365"
-    t366 366 "366"
-    t367 367 "367"
-    t368 368 "368"
-    t369 369 "369"
-    t370 370 "370"
-    t371 371 "371"
-    t372 372 "372"
-    t373 373 "373"
-    t374 374 "374"
-    t375 375 "375"
-    t376 376 "376"
-    t377 377 "377"
-    t378 378 "378"
-    t379 379 "379"
-    t380 380 "380"
-    t381 381 "381"
-    t382 382 "382"
-    t383 383 "383"
-    t384 384 "384"
-    t385 385 "385"
-    t386 386 "386"
-    t387 387 "387"
-    t388 388 "388"
-    t389 389 "389"
-    t390 390 "390"
-    t391 391 "391"
-    t392 392 "392"
-    t393 393 "393"
-    t394 394 "394"
-    t395 395 "395"
-    t396 396 "396"
-    t397 397 "397"
-    t398 398 "398"
-    t399 399 "399"
-    t400 400 "400"
-    t401 401 "401"
-    t402 402 "402"
-    t403 403 "403"
-    t404 404 "404"
-    t405 405 "405"
-    t406 406 "406"
-    t407 407 "407"
-    t408 408 "408"
-    t409 409 "409"
-    t410 410 "410"
-    t411 411 "411"
-    t412 412 "412"
-    t413 413 "413"
-    t414 414 "414"
-    t415 415 "415"
-    t416 416 "416"
-    t417 417 "417"
-    t418 418 "418"
-    t419 419 "419"
-    t420 420 "420"
-    t421 421 "421"
-    t422 422 "422"
-    t423 423 "423"
-    t424 424 "424"
-    t425 425 "425"
-    t426 426 "426"
-    t427 427 "427"
-    t428 428 "428"
-    t429 429 "429"
-    t430 430 "430"
-    t431 431 "431"
-    t432 432 "432"
-    t433 433 "433"
-    t434 434 "434"
-    t435 435 "435"
-    t436 436 "436"
-    t437 437 "437"
-    t438 438 "438"
-    t439 439 "439"
-    t440 440 "440"
-    t441 441 "441"
-    t442 442 "442"
-    t443 443 "443"
-    t444 444 "444"
-    t445 445 "445"
-    t446 446 "446"
-    t447 447 "447"
-    t448 448 "448"
-    t449 449 "449"
-    t450 450 "450"
-    t451 451 "451"
-    t452 452 "452"
-    t453 453 "453"
-    t454 454 "454"
-    t455 455 "455"
-    t456 456 "456"
-    t457 457 "457"
-    t458 458 "458"
-    t459 459 "459"
-    t460 460 "460"
-    t461 461 "461"
-    t462 462 "462"
-    t463 463 "463"
-    t464 464 "464"
-    t465 465 "465"
-    t466 466 "466"
-    t467 467 "467"
-    t468 468 "468"
-    t469 469 "469"
-    t470 470 "470"
-    t471 471 "471"
-    t472 472 "472"
-    t473 473 "473"
-    t474 474 "474"
-    t475 475 "475"
-    t476 476 "476"
-    t477 477 "477"
-    t478 478 "478"
-    t479 479 "479"
-    t480 480 "480"
-    t481 481 "481"
-    t482 482 "482"
-    t483 483 "483"
-    t484 484 "484"
-    t485 485 "485"
-    t486 486 "486"
-    t487 487 "487"
-    t488 488 "488"
-    t489 489 "489"
-    t490 490 "490"
-    t491 491 "491"
-    t492 492 "492"
-    t493 493 "493"
-    t494 494 "494"
-    t495 495 "495"
-    t496 496 "496"
-    t497 497 "497"
-    t498 498 "498"
-    t499 499 "499"
-    t500 500 "500"
-    t501 501 "501"
-    t502 502 "502"
-    t503 503 "503"
-    t504 504 "504"
-    t505 505 "505"
-    t506 506 "506"
-    t507 507 "507"
-    t508 508 "508"
-    t509 509 "509"
-    t510 510 "510"
-    t511 511 "511"
-    t512 512 "512"
-    t513 513 "513"
-    t514 514 "514"
-    t515 515 "515"
-    t516 516 "516"
-    t517 517 "517"
-    t518 518 "518"
-    t519 519 "519"
-    t520 520 "520"
-    t521 521 "521"
-    t522 522 "522"
-    t523 523 "523"
-    t524 524 "524"
-    t525 525 "525"
-    t526 526 "526"
-    t527 527 "527"
-    t528 528 "528"
-    t529 529 "529"
-    t530 530 "530"
-    t531 531 "531"
-    t532 532 "532"
-    t533 533 "533"
-    t534 534 "534"
-    t535 535 "535"
-    t536 536 "536"
-    t537 537 "537"
-    t538 538 "538"
-    t539 539 "539"
-    t540 540 "540"
-    t541 541 "541"
-    t542 542 "542"
-    t543 543 "543"
-    t544 544 "544"
-    t545 545 "545"
-    t546 546 "546"
-    t547 547 "547"
-    t548 548 "548"
-    t549 549 "549"
-    t550 550 "550"
-    t551 551 "551"
-    t552 552 "552"
-    t553 553 "553"
-    t554 554 "554"
-    t555 555 "555"
-    t556 556 "556"
-    t557 557 "557"
-    t558 558 "558"
-    t559 559 "559"
-    t560 560 "560"
-    t561 561 "561"
-    t562 562 "562"
-    t563 563 "563"
-    t564 564 "564"
-    t565 565 "565"
-    t566 566 "566"
-    t567 567 "567"
-    t568 568 "568"
-    t569 569 "569"
-    t570 570 "570"
-    t571 571 "571"
-    t572 572 "572"
-    t573 573 "573"
-    t574 574 "574"
-    t575 575 "575"
-    t576 576 "576"
-    t577 577 "577"
-    t578 578 "578"
-    t579 579 "579"
-    t580 580 "580"
-    t581 581 "581"
-    t582 582 "582"
-    t583 583 "583"
-    t584 584 "584"
-    t585 585 "585"
-    t586 586 "586"
-    t587 587 "587"
-    t588 588 "588"
-    t589 589 "589"
-    t590 590 "590"
-    t591 591 "591"
-    t592 592 "592"
-    t593 593 "593"
-    t594 594 "594"
-    t595 595 "595"
-    t596 596 "596"
-    t597 597 "597"
-    t598 598 "598"
-    t599 599 "599"
-    t600 600 "600"
-    t601 601 "601"
-    t602 602 "602"
-    t603 603 "603"
-    t604 604 "604"
-    t605 605 "605"
-    t606 606 "606"
-    t607 607 "607"
-    t608 608 "608"
-    t609 609 "609"
-    t610 610 "610"
-    t611 611 "611"
-    t612 612 "612"
-    t613 613 "613"
-    t614 614 "614"
-    t615 615 "615"
-    t616 616 "616"
-    t617 617 "617"
-    t618 618 "618"
-    t619 619 "619"
-    t620 620 "620"
-    t621 621 "621"
-    t622 622 "622"
-    t623 623 "623"
-    t624 624 "624"
-    t625 625 "625"
-    t626 626 "626"
-    t627 627 "627"
-    t628 628 "628"
-    t629 629 "629"
-    t630 630 "630"
-    t631 631 "631"
-    t632 632 "632"
-    t633 633 "633"
-    t634 634 "634"
-    t635 635 "635"
-    t636 636 "636"
-    t637 637 "637"
-    t638 638 "638"
-    t639 639 "639"
-    t640 640 "640"
-    t641 641 "641"
-    t642 642 "642"
-    t643 643 "643"
-    t644 644 "644"
-    t645 645 "645"
-    t646 646 "646"
-    t647 647 "647"
-    t648 648 "648"
-    t649 649 "649"
-    t650 650 "650"
-    t651 651 "651"
-    t652 652 "652"
-    t653 653 "653"
-    t654 654 "654"
-    t655 655 "655"
-    t656 656 "656"
-    t657 657 "657"
-    t658 658 "658"
-    t659 659 "659"
-    t660 660 "660"
-    t661 661 "661"
-    t662 662 "662"
-    t663 663 "663"
-    t664 664 "664"
-    t665 665 "665"
-    t666 666 "666"
-    t667 667 "667"
-    t668 668 "668"
-    t669 669 "669"
-    t670 670 "670"
-    t671 671 "671"
-    t672 672 "672"
-    t673 673 "673"
-    t674 674 "674"
-    t675 675 "675"
-    t676 676 "676"
-    t677 677 "677"
-    t678 678 "678"
-    t679 679 "679"
-    t680 680 "680"
-    t681 681 "681"
-    t682 682 "682"
-    t683 683 "683"
-    t684 684 "684"
-    t685 685 "685"
-    t686 686 "686"
-    t687 687 "687"
-    t688 688 "688"
-    t689 689 "689"
-    t690 690 "690"
-    t691 691 "691"
-    t692 692 "692"
-    t693 693 "693"
-    t694 694 "694"
-    t695 695 "695"
-    t696 696 "696"
-    t697 697 "697"
-    t698 698 "698"
-    t699 699 "699"
-    t700 700 "700"
-    t701 701 "701"
-    t702 702 "702"
-    t703 703 "703"
-    t704 704 "704"
-    t705 705 "705"
-    t706 706 "706"
-    t707 707 "707"
-    t708 708 "708"
-    t709 709 "709"
-    t710 710 "710"
-    t711 711 "711"
-    t712 712 "712"
-    t713 713 "713"
-    t714 714 "714"
-    t715 715 "715"
-    t716 716 "716"
-    t717 717 "717"
-    t718 718 "718"
-    t719 719 "719"
-    t720 720 "720"
-    t721 721 "721"
-    t722 722 "722"
-    t723 723 "723"
-    t724 724 "724"
-    t725 725 "725"
-    t726 726 "726"
-    t727 727 "727"
-    t728 728 "728"
-    t729 729 "729"
-    t730 730 "730"
-    t731 731 "731"
-    t732 732 "732"
-    t733 733 "733"
-    t734 734 "734"
-    t735 735 "735"
-    t736 736 "736"
-    t737 737 "737"
-    t738 738 "738"
-    t739 739 "739"
-    t740 740 "740"
-    t741 741 "741"
-    t742 742 "742"
-    t743 743 "743"
-    t744 744 "744"
-    t745 745 "745"
-    t746 746 "746"
-    t747 747 "747"
-    t748 748 "748"
-    t749 749 "749"
-    t750 750 "750"
-    t751 751 "751"
-    t752 752 "752"
-    t753 753 "753"
-    t754 754 "754"
-    t755 755 "755"
-    t756 756 "756"
-    t757 757 "757"
-    t758 758 "758"
-    t759 759 "759"
-    t760 760 "760"
-    t761 761 "761"
-    t762 762 "762"
-    t763 763 "763"
-    t764 764 "764"
-    t765 765 "765"
-    t766 766 "766"
-    t767 767 "767"
-    t768 768 "768"
-    t769 769 "769"
-    t770 770 "770"
-    t771 771 "771"
-    t772 772 "772"
-    t773 773 "773"
-    t774 774 "774"
-    t775 775 "775"
-    t776 776 "776"
-    t777 777 "777"
-    t778 778 "778"
-    t779 779 "779"
-    t780 780 "780"
-    t781 781 "781"
-    t782 782 "782"
-    t783 783 "783"
-    t784 784 "784"
-    t785 785 "785"
-    t786 786 "786"
-    t787 787 "787"
-    t788 788 "788"
-    t789 789 "789"
-    t790 790 "790"
-    t791 791 "791"
-    t792 792 "792"
-    t793 793 "793"
-    t794 794 "794"
-    t795 795 "795"
-    t796 796 "796"
-    t797 797 "797"
-    t798 798 "798"
-    t799 799 "799"
-    t800 800 "800"
-    t801 801 "801"
-    t802 802 "802"
-    t803 803 "803"
-    t804 804 "804"
-    t805 805 "805"
-    t806 806 "806"
-    t807 807 "807"
-    t808 808 "808"
-    t809 809 "809"
-    t810 810 "810"
-    t811 811 "811"
-    t812 812 "812"
-    t813 813 "813"
-    t814 814 "814"
-    t815 815 "815"
-    t816 816 "816"
-    t817 817 "817"
-    t818 818 "818"
-    t819 819 "819"
-    t820 820 "820"
-    t821 821 "821"
-    t822 822 "822"
-    t823 823 "823"
-    t824 824 "824"
-    t825 825 "825"
-    t826 826 "826"
-    t827 827 "827"
-    t828 828 "828"
-    t829 829 "829"
-    t830 830 "830"
-    t831 831 "831"
-    t832 832 "832"
-    t833 833 "833"
-    t834 834 "834"
-    t835 835 "835"
-    t836 836 "836"
-    t837 837 "837"
-    t838 838 "838"
-    t839 839 "839"
-    t840 840 "840"
-    t841 841 "841"
-    t842 842 "842"
-    t843 843 "843"
-    t844 844 "844"
-    t845 845 "845"
-    t846 846 "846"
-    t847 847 "847"
-    t848 848 "848"
-    t849 849 "849"
-    t850 850 "850"
-    t851 851 "851"
-    t852 852 "852"
-    t853 853 "853"
-    t854 854 "854"
-    t855 855 "855"
-    t856 856 "856"
-    t857 857 "857"
-    t858 858 "858"
-    t859 859 "859"
-    t860 860 "860"
-    t861 861 "861"
-    t862 862 "862"
-    t863 863 "863"
-    t864 864 "864"
-    t865 865 "865"
-    t866 866 "866"
-    t867 867 "867"
-    t868 868 "868"
-    t869 869 "869"
-    t870 870 "870"
-    t871 871 "871"
-    t872 872 "872"
-    t873 873 "873"
-    t874 874 "874"
-    t875 875 "875"
-    t876 876 "876"
-    t877 877 "877"
-    t878 878 "878"
-    t879 879 "879"
-    t880 880 "880"
-    t881 881 "881"
-    t882 882 "882"
-    t883 883 "883"
-    t884 884 "884"
-    t885 885 "885"
-    t886 886 "886"
-    t887 887 "887"
-    t888 888 "888"
-    t889 889 "889"
-    t890 890 "890"
-    t891 891 "891"
-    t892 892 "892"
-    t893 893 "893"
-    t894 894 "894"
-    t895 895 "895"
-    t896 896 "896"
-    t897 897 "897"
-    t898 898 "898"
-    t899 899 "899"
-    t900 900 "900"
-    t901 901 "901"
-    t902 902 "902"
-    t903 903 "903"
-    t904 904 "904"
-    t905 905 "905"
-    t906 906 "906"
-    t907 907 "907"
-    t908 908 "908"
-    t909 909 "909"
-    t910 910 "910"
-    t911 911 "911"
-    t912 912 "912"
-    t913 913 "913"
-    t914 914 "914"
-    t915 915 "915"
-    t916 916 "916"
-    t917 917 "917"
-    t918 918 "918"
-    t919 919 "919"
-    t920 920 "920"
-    t921 921 "921"
-    t922 922 "922"
-    t923 923 "923"
-    t924 924 "924"
-    t925 925 "925"
-    t926 926 "926"
-    t927 927 "927"
-    t928 928 "928"
-    t929 929 "929"
-    t930 930 "930"
-    t931 931 "931"
-    t932 932 "932"
-    t933 933 "933"
-    t934 934 "934"
-    t935 935 "935"
-    t936 936 "936"
-    t937 937 "937"
-    t938 938 "938"
-    t939 939 "939"
-    t940 940 "940"
-    t941 941 "941"
-    t942 942 "942"
-    t943 943 "943"
-    t944 944 "944"
-    t945 945 "945"
-    t946 946 "946"
-    t947 947 "947"
-    t948 948 "948"
-    t949 949 "949"
-    t950 950 "950"
-    t951 951 "951"
-    t952 952 "952"
-    t953 953 "953"
-    t954 954 "954"
-    t955 955 "955"
-    t956 956 "956"
-    t957 957 "957"
-    t958 958 "958"
-    t959 959 "959"
-    t960 960 "960"
-    t961 961 "961"
-    t962 962 "962"
-    t963 963 "963"
-    t964 964 "964"
-    t965 965 "965"
-    t966 966 "966"
-    t967 967 "967"
-    t968 968 "968"
-    t969 969 "969"
-    t970 970 "970"
-    t971 971 "971"
-    t972 972 "972"
-    t973 973 "973"
-    t974 974 "974"
-    t975 975 "975"
-    t976 976 "976"
-    t977 977 "977"
-    t978 978 "978"
-    t979 979 "979"
-    t980 980 "980"
-    t981 981 "981"
-    t982 982 "982"
-    t983 983 "983"
-    t984 984 "984"
-    t985 985 "985"
-    t986 986 "986"
-    t987 987 "987"
-    t988 988 "988"
-    t989 989 "989"
-    t990 990 "990"
-    t991 991 "991"
-    t992 992 "992"
-    t993 993 "993"
-    t994 994 "994"
-    t995 995 "995"
-    t996 996 "996"
-    t997 997 "997"
-    t998 998 "998"
-    t999 999 "999"
-    t1000 1000 "1000"
-%%
-input:
-  exp        { assert ($1 == 1); $$ = $1; }
-| input exp  { assert ($2 == $1 + 1); $$ = $2; }
-;
-
-exp:
-  n1 "1" { assert ($1 == 1); $$ = $1; }
-| n2 "2" { assert ($1 == 2); $$ = $1; }
-| n3 "3" { assert ($1 == 3); $$ = $1; }
-| n4 "4" { assert ($1 == 4); $$ = $1; }
-| n5 "5" { assert ($1 == 5); $$ = $1; }
-| n6 "6" { assert ($1 == 6); $$ = $1; }
-| n7 "7" { assert ($1 == 7); $$ = $1; }
-| n8 "8" { assert ($1 == 8); $$ = $1; }
-| n9 "9" { assert ($1 == 9); $$ = $1; }
-| n10 "10" { assert ($1 == 10); $$ = $1; }
-| n11 "11" { assert ($1 == 11); $$ = $1; }
-| n12 "12" { assert ($1 == 12); $$ = $1; }
-| n13 "13" { assert ($1 == 13); $$ = $1; }
-| n14 "14" { assert ($1 == 14); $$ = $1; }
-| n15 "15" { assert ($1 == 15); $$ = $1; }
-| n16 "16" { assert ($1 == 16); $$ = $1; }
-| n17 "17" { assert ($1 == 17); $$ = $1; }
-| n18 "18" { assert ($1 == 18); $$ = $1; }
-| n19 "19" { assert ($1 == 19); $$ = $1; }
-| n20 "20" { assert ($1 == 20); $$ = $1; }
-| n21 "21" { assert ($1 == 21); $$ = $1; }
-| n22 "22" { assert ($1 == 22); $$ = $1; }
-| n23 "23" { assert ($1 == 23); $$ = $1; }
-| n24 "24" { assert ($1 == 24); $$ = $1; }
-| n25 "25" { assert ($1 == 25); $$ = $1; }
-| n26 "26" { assert ($1 == 26); $$ = $1; }
-| n27 "27" { assert ($1 == 27); $$ = $1; }
-| n28 "28" { assert ($1 == 28); $$ = $1; }
-| n29 "29" { assert ($1 == 29); $$ = $1; }
-| n30 "30" { assert ($1 == 30); $$ = $1; }
-| n31 "31" { assert ($1 == 31); $$ = $1; }
-| n32 "32" { assert ($1 == 32); $$ = $1; }
-| n33 "33" { assert ($1 == 33); $$ = $1; }
-| n34 "34" { assert ($1 == 34); $$ = $1; }
-| n35 "35" { assert ($1 == 35); $$ = $1; }
-| n36 "36" { assert ($1 == 36); $$ = $1; }
-| n37 "37" { assert ($1 == 37); $$ = $1; }
-| n38 "38" { assert ($1 == 38); $$ = $1; }
-| n39 "39" { assert ($1 == 39); $$ = $1; }
-| n40 "40" { assert ($1 == 40); $$ = $1; }
-| n41 "41" { assert ($1 == 41); $$ = $1; }
-| n42 "42" { assert ($1 == 42); $$ = $1; }
-| n43 "43" { assert ($1 == 43); $$ = $1; }
-| n44 "44" { assert ($1 == 44); $$ = $1; }
-| n45 "45" { assert ($1 == 45); $$ = $1; }
-| n46 "46" { assert ($1 == 46); $$ = $1; }
-| n47 "47" { assert ($1 == 47); $$ = $1; }
-| n48 "48" { assert ($1 == 48); $$ = $1; }
-| n49 "49" { assert ($1 == 49); $$ = $1; }
-| n50 "50" { assert ($1 == 50); $$ = $1; }
-| n51 "51" { assert ($1 == 51); $$ = $1; }
-| n52 "52" { assert ($1 == 52); $$ = $1; }
-| n53 "53" { assert ($1 == 53); $$ = $1; }
-| n54 "54" { assert ($1 == 54); $$ = $1; }
-| n55 "55" { assert ($1 == 55); $$ = $1; }
-| n56 "56" { assert ($1 == 56); $$ = $1; }
-| n57 "57" { assert ($1 == 57); $$ = $1; }
-| n58 "58" { assert ($1 == 58); $$ = $1; }
-| n59 "59" { assert ($1 == 59); $$ = $1; }
-| n60 "60" { assert ($1 == 60); $$ = $1; }
-| n61 "61" { assert ($1 == 61); $$ = $1; }
-| n62 "62" { assert ($1 == 62); $$ = $1; }
-| n63 "63" { assert ($1 == 63); $$ = $1; }
-| n64 "64" { assert ($1 == 64); $$ = $1; }
-| n65 "65" { assert ($1 == 65); $$ = $1; }
-| n66 "66" { assert ($1 == 66); $$ = $1; }
-| n67 "67" { assert ($1 == 67); $$ = $1; }
-| n68 "68" { assert ($1 == 68); $$ = $1; }
-| n69 "69" { assert ($1 == 69); $$ = $1; }
-| n70 "70" { assert ($1 == 70); $$ = $1; }
-| n71 "71" { assert ($1 == 71); $$ = $1; }
-| n72 "72" { assert ($1 == 72); $$ = $1; }
-| n73 "73" { assert ($1 == 73); $$ = $1; }
-| n74 "74" { assert ($1 == 74); $$ = $1; }
-| n75 "75" { assert ($1 == 75); $$ = $1; }
-| n76 "76" { assert ($1 == 76); $$ = $1; }
-| n77 "77" { assert ($1 == 77); $$ = $1; }
-| n78 "78" { assert ($1 == 78); $$ = $1; }
-| n79 "79" { assert ($1 == 79); $$ = $1; }
-| n80 "80" { assert ($1 == 80); $$ = $1; }
-| n81 "81" { assert ($1 == 81); $$ = $1; }
-| n82 "82" { assert ($1 == 82); $$ = $1; }
-| n83 "83" { assert ($1 == 83); $$ = $1; }
-| n84 "84" { assert ($1 == 84); $$ = $1; }
-| n85 "85" { assert ($1 == 85); $$ = $1; }
-| n86 "86" { assert ($1 == 86); $$ = $1; }
-| n87 "87" { assert ($1 == 87); $$ = $1; }
-| n88 "88" { assert ($1 == 88); $$ = $1; }
-| n89 "89" { assert ($1 == 89); $$ = $1; }
-| n90 "90" { assert ($1 == 90); $$ = $1; }
-| n91 "91" { assert ($1 == 91); $$ = $1; }
-| n92 "92" { assert ($1 == 92); $$ = $1; }
-| n93 "93" { assert ($1 == 93); $$ = $1; }
-| n94 "94" { assert ($1 == 94); $$ = $1; }
-| n95 "95" { assert ($1 == 95); $$ = $1; }
-| n96 "96" { assert ($1 == 96); $$ = $1; }
-| n97 "97" { assert ($1 == 97); $$ = $1; }
-| n98 "98" { assert ($1 == 98); $$ = $1; }
-| n99 "99" { assert ($1 == 99); $$ = $1; }
-| n100 "100" { assert ($1 == 100); $$ = $1; }
-| n101 "101" { assert ($1 == 101); $$ = $1; }
-| n102 "102" { assert ($1 == 102); $$ = $1; }
-| n103 "103" { assert ($1 == 103); $$ = $1; }
-| n104 "104" { assert ($1 == 104); $$ = $1; }
-| n105 "105" { assert ($1 == 105); $$ = $1; }
-| n106 "106" { assert ($1 == 106); $$ = $1; }
-| n107 "107" { assert ($1 == 107); $$ = $1; }
-| n108 "108" { assert ($1 == 108); $$ = $1; }
-| n109 "109" { assert ($1 == 109); $$ = $1; }
-| n110 "110" { assert ($1 == 110); $$ = $1; }
-| n111 "111" { assert ($1 == 111); $$ = $1; }
-| n112 "112" { assert ($1 == 112); $$ = $1; }
-| n113 "113" { assert ($1 == 113); $$ = $1; }
-| n114 "114" { assert ($1 == 114); $$ = $1; }
-| n115 "115" { assert ($1 == 115); $$ = $1; }
-| n116 "116" { assert ($1 == 116); $$ = $1; }
-| n117 "117" { assert ($1 == 117); $$ = $1; }
-| n118 "118" { assert ($1 == 118); $$ = $1; }
-| n119 "119" { assert ($1 == 119); $$ = $1; }
-| n120 "120" { assert ($1 == 120); $$ = $1; }
-| n121 "121" { assert ($1 == 121); $$ = $1; }
-| n122 "122" { assert ($1 == 122); $$ = $1; }
-| n123 "123" { assert ($1 == 123); $$ = $1; }
-| n124 "124" { assert ($1 == 124); $$ = $1; }
-| n125 "125" { assert ($1 == 125); $$ = $1; }
-| n126 "126" { assert ($1 == 126); $$ = $1; }
-| n127 "127" { assert ($1 == 127); $$ = $1; }
-| n128 "128" { assert ($1 == 128); $$ = $1; }
-| n129 "129" { assert ($1 == 129); $$ = $1; }
-| n130 "130" { assert ($1 == 130); $$ = $1; }
-| n131 "131" { assert ($1 == 131); $$ = $1; }
-| n132 "132" { assert ($1 == 132); $$ = $1; }
-| n133 "133" { assert ($1 == 133); $$ = $1; }
-| n134 "134" { assert ($1 == 134); $$ = $1; }
-| n135 "135" { assert ($1 == 135); $$ = $1; }
-| n136 "136" { assert ($1 == 136); $$ = $1; }
-| n137 "137" { assert ($1 == 137); $$ = $1; }
-| n138 "138" { assert ($1 == 138); $$ = $1; }
-| n139 "139" { assert ($1 == 139); $$ = $1; }
-| n140 "140" { assert ($1 == 140); $$ = $1; }
-| n141 "141" { assert ($1 == 141); $$ = $1; }
-| n142 "142" { assert ($1 == 142); $$ = $1; }
-| n143 "143" { assert ($1 == 143); $$ = $1; }
-| n144 "144" { assert ($1 == 144); $$ = $1; }
-| n145 "145" { assert ($1 == 145); $$ = $1; }
-| n146 "146" { assert ($1 == 146); $$ = $1; }
-| n147 "147" { assert ($1 == 147); $$ = $1; }
-| n148 "148" { assert ($1 == 148); $$ = $1; }
-| n149 "149" { assert ($1 == 149); $$ = $1; }
-| n150 "150" { assert ($1 == 150); $$ = $1; }
-| n151 "151" { assert ($1 == 151); $$ = $1; }
-| n152 "152" { assert ($1 == 152); $$ = $1; }
-| n153 "153" { assert ($1 == 153); $$ = $1; }
-| n154 "154" { assert ($1 == 154); $$ = $1; }
-| n155 "155" { assert ($1 == 155); $$ = $1; }
-| n156 "156" { assert ($1 == 156); $$ = $1; }
-| n157 "157" { assert ($1 == 157); $$ = $1; }
-| n158 "158" { assert ($1 == 158); $$ = $1; }
-| n159 "159" { assert ($1 == 159); $$ = $1; }
-| n160 "160" { assert ($1 == 160); $$ = $1; }
-| n161 "161" { assert ($1 == 161); $$ = $1; }
-| n162 "162" { assert ($1 == 162); $$ = $1; }
-| n163 "163" { assert ($1 == 163); $$ = $1; }
-| n164 "164" { assert ($1 == 164); $$ = $1; }
-| n165 "165" { assert ($1 == 165); $$ = $1; }
-| n166 "166" { assert ($1 == 166); $$ = $1; }
-| n167 "167" { assert ($1 == 167); $$ = $1; }
-| n168 "168" { assert ($1 == 168); $$ = $1; }
-| n169 "169" { assert ($1 == 169); $$ = $1; }
-| n170 "170" { assert ($1 == 170); $$ = $1; }
-| n171 "171" { assert ($1 == 171); $$ = $1; }
-| n172 "172" { assert ($1 == 172); $$ = $1; }
-| n173 "173" { assert ($1 == 173); $$ = $1; }
-| n174 "174" { assert ($1 == 174); $$ = $1; }
-| n175 "175" { assert ($1 == 175); $$ = $1; }
-| n176 "176" { assert ($1 == 176); $$ = $1; }
-| n177 "177" { assert ($1 == 177); $$ = $1; }
-| n178 "178" { assert ($1 == 178); $$ = $1; }
-| n179 "179" { assert ($1 == 179); $$ = $1; }
-| n180 "180" { assert ($1 == 180); $$ = $1; }
-| n181 "181" { assert ($1 == 181); $$ = $1; }
-| n182 "182" { assert ($1 == 182); $$ = $1; }
-| n183 "183" { assert ($1 == 183); $$ = $1; }
-| n184 "184" { assert ($1 == 184); $$ = $1; }
-| n185 "185" { assert ($1 == 185); $$ = $1; }
-| n186 "186" { assert ($1 == 186); $$ = $1; }
-| n187 "187" { assert ($1 == 187); $$ = $1; }
-| n188 "188" { assert ($1 == 188); $$ = $1; }
-| n189 "189" { assert ($1 == 189); $$ = $1; }
-| n190 "190" { assert ($1 == 190); $$ = $1; }
-| n191 "191" { assert ($1 == 191); $$ = $1; }
-| n192 "192" { assert ($1 == 192); $$ = $1; }
-| n193 "193" { assert ($1 == 193); $$ = $1; }
-| n194 "194" { assert ($1 == 194); $$ = $1; }
-| n195 "195" { assert ($1 == 195); $$ = $1; }
-| n196 "196" { assert ($1 == 196); $$ = $1; }
-| n197 "197" { assert ($1 == 197); $$ = $1; }
-| n198 "198" { assert ($1 == 198); $$ = $1; }
-| n199 "199" { assert ($1 == 199); $$ = $1; }
-| n200 "200" { assert ($1 == 200); $$ = $1; }
-| n201 "201" { assert ($1 == 201); $$ = $1; }
-| n202 "202" { assert ($1 == 202); $$ = $1; }
-| n203 "203" { assert ($1 == 203); $$ = $1; }
-| n204 "204" { assert ($1 == 204); $$ = $1; }
-| n205 "205" { assert ($1 == 205); $$ = $1; }
-| n206 "206" { assert ($1 == 206); $$ = $1; }
-| n207 "207" { assert ($1 == 207); $$ = $1; }
-| n208 "208" { assert ($1 == 208); $$ = $1; }
-| n209 "209" { assert ($1 == 209); $$ = $1; }
-| n210 "210" { assert ($1 == 210); $$ = $1; }
-| n211 "211" { assert ($1 == 211); $$ = $1; }
-| n212 "212" { assert ($1 == 212); $$ = $1; }
-| n213 "213" { assert ($1 == 213); $$ = $1; }
-| n214 "214" { assert ($1 == 214); $$ = $1; }
-| n215 "215" { assert ($1 == 215); $$ = $1; }
-| n216 "216" { assert ($1 == 216); $$ = $1; }
-| n217 "217" { assert ($1 == 217); $$ = $1; }
-| n218 "218" { assert ($1 == 218); $$ = $1; }
-| n219 "219" { assert ($1 == 219); $$ = $1; }
-| n220 "220" { assert ($1 == 220); $$ = $1; }
-| n221 "221" { assert ($1 == 221); $$ = $1; }
-| n222 "222" { assert ($1 == 222); $$ = $1; }
-| n223 "223" { assert ($1 == 223); $$ = $1; }
-| n224 "224" { assert ($1 == 224); $$ = $1; }
-| n225 "225" { assert ($1 == 225); $$ = $1; }
-| n226 "226" { assert ($1 == 226); $$ = $1; }
-| n227 "227" { assert ($1 == 227); $$ = $1; }
-| n228 "228" { assert ($1 == 228); $$ = $1; }
-| n229 "229" { assert ($1 == 229); $$ = $1; }
-| n230 "230" { assert ($1 == 230); $$ = $1; }
-| n231 "231" { assert ($1 == 231); $$ = $1; }
-| n232 "232" { assert ($1 == 232); $$ = $1; }
-| n233 "233" { assert ($1 == 233); $$ = $1; }
-| n234 "234" { assert ($1 == 234); $$ = $1; }
-| n235 "235" { assert ($1 == 235); $$ = $1; }
-| n236 "236" { assert ($1 == 236); $$ = $1; }
-| n237 "237" { assert ($1 == 237); $$ = $1; }
-| n238 "238" { assert ($1 == 238); $$ = $1; }
-| n239 "239" { assert ($1 == 239); $$ = $1; }
-| n240 "240" { assert ($1 == 240); $$ = $1; }
-| n241 "241" { assert ($1 == 241); $$ = $1; }
-| n242 "242" { assert ($1 == 242); $$ = $1; }
-| n243 "243" { assert ($1 == 243); $$ = $1; }
-| n244 "244" { assert ($1 == 244); $$ = $1; }
-| n245 "245" { assert ($1 == 245); $$ = $1; }
-| n246 "246" { assert ($1 == 246); $$ = $1; }
-| n247 "247" { assert ($1 == 247); $$ = $1; }
-| n248 "248" { assert ($1 == 248); $$ = $1; }
-| n249 "249" { assert ($1 == 249); $$ = $1; }
-| n250 "250" { assert ($1 == 250); $$ = $1; }
-| n251 "251" { assert ($1 == 251); $$ = $1; }
-| n252 "252" { assert ($1 == 252); $$ = $1; }
-| n253 "253" { assert ($1 == 253); $$ = $1; }
-| n254 "254" { assert ($1 == 254); $$ = $1; }
-| n255 "255" { assert ($1 == 255); $$ = $1; }
-| n256 "256" { assert ($1 == 256); $$ = $1; }
-| n257 "257" { assert ($1 == 257); $$ = $1; }
-| n258 "258" { assert ($1 == 258); $$ = $1; }
-| n259 "259" { assert ($1 == 259); $$ = $1; }
-| n260 "260" { assert ($1 == 260); $$ = $1; }
-| n261 "261" { assert ($1 == 261); $$ = $1; }
-| n262 "262" { assert ($1 == 262); $$ = $1; }
-| n263 "263" { assert ($1 == 263); $$ = $1; }
-| n264 "264" { assert ($1 == 264); $$ = $1; }
-| n265 "265" { assert ($1 == 265); $$ = $1; }
-| n266 "266" { assert ($1 == 266); $$ = $1; }
-| n267 "267" { assert ($1 == 267); $$ = $1; }
-| n268 "268" { assert ($1 == 268); $$ = $1; }
-| n269 "269" { assert ($1 == 269); $$ = $1; }
-| n270 "270" { assert ($1 == 270); $$ = $1; }
-| n271 "271" { assert ($1 == 271); $$ = $1; }
-| n272 "272" { assert ($1 == 272); $$ = $1; }
-| n273 "273" { assert ($1 == 273); $$ = $1; }
-| n274 "274" { assert ($1 == 274); $$ = $1; }
-| n275 "275" { assert ($1 == 275); $$ = $1; }
-| n276 "276" { assert ($1 == 276); $$ = $1; }
-| n277 "277" { assert ($1 == 277); $$ = $1; }
-| n278 "278" { assert ($1 == 278); $$ = $1; }
-| n279 "279" { assert ($1 == 279); $$ = $1; }
-| n280 "280" { assert ($1 == 280); $$ = $1; }
-| n281 "281" { assert ($1 == 281); $$ = $1; }
-| n282 "282" { assert ($1 == 282); $$ = $1; }
-| n283 "283" { assert ($1 == 283); $$ = $1; }
-| n284 "284" { assert ($1 == 284); $$ = $1; }
-| n285 "285" { assert ($1 == 285); $$ = $1; }
-| n286 "286" { assert ($1 == 286); $$ = $1; }
-| n287 "287" { assert ($1 == 287); $$ = $1; }
-| n288 "288" { assert ($1 == 288); $$ = $1; }
-| n289 "289" { assert ($1 == 289); $$ = $1; }
-| n290 "290" { assert ($1 == 290); $$ = $1; }
-| n291 "291" { assert ($1 == 291); $$ = $1; }
-| n292 "292" { assert ($1 == 292); $$ = $1; }
-| n293 "293" { assert ($1 == 293); $$ = $1; }
-| n294 "294" { assert ($1 == 294); $$ = $1; }
-| n295 "295" { assert ($1 == 295); $$ = $1; }
-| n296 "296" { assert ($1 == 296); $$ = $1; }
-| n297 "297" { assert ($1 == 297); $$ = $1; }
-| n298 "298" { assert ($1 == 298); $$ = $1; }
-| n299 "299" { assert ($1 == 299); $$ = $1; }
-| n300 "300" { assert ($1 == 300); $$ = $1; }
-| n301 "301" { assert ($1 == 301); $$ = $1; }
-| n302 "302" { assert ($1 == 302); $$ = $1; }
-| n303 "303" { assert ($1 == 303); $$ = $1; }
-| n304 "304" { assert ($1 == 304); $$ = $1; }
-| n305 "305" { assert ($1 == 305); $$ = $1; }
-| n306 "306" { assert ($1 == 306); $$ = $1; }
-| n307 "307" { assert ($1 == 307); $$ = $1; }
-| n308 "308" { assert ($1 == 308); $$ = $1; }
-| n309 "309" { assert ($1 == 309); $$ = $1; }
-| n310 "310" { assert ($1 == 310); $$ = $1; }
-| n311 "311" { assert ($1 == 311); $$ = $1; }
-| n312 "312" { assert ($1 == 312); $$ = $1; }
-| n313 "313" { assert ($1 == 313); $$ = $1; }
-| n314 "314" { assert ($1 == 314); $$ = $1; }
-| n315 "315" { assert ($1 == 315); $$ = $1; }
-| n316 "316" { assert ($1 == 316); $$ = $1; }
-| n317 "317" { assert ($1 == 317); $$ = $1; }
-| n318 "318" { assert ($1 == 318); $$ = $1; }
-| n319 "319" { assert ($1 == 319); $$ = $1; }
-| n320 "320" { assert ($1 == 320); $$ = $1; }
-| n321 "321" { assert ($1 == 321); $$ = $1; }
-| n322 "322" { assert ($1 == 322); $$ = $1; }
-| n323 "323" { assert ($1 == 323); $$ = $1; }
-| n324 "324" { assert ($1 == 324); $$ = $1; }
-| n325 "325" { assert ($1 == 325); $$ = $1; }
-| n326 "326" { assert ($1 == 326); $$ = $1; }
-| n327 "327" { assert ($1 == 327); $$ = $1; }
-| n328 "328" { assert ($1 == 328); $$ = $1; }
-| n329 "329" { assert ($1 == 329); $$ = $1; }
-| n330 "330" { assert ($1 == 330); $$ = $1; }
-| n331 "331" { assert ($1 == 331); $$ = $1; }
-| n332 "332" { assert ($1 == 332); $$ = $1; }
-| n333 "333" { assert ($1 == 333); $$ = $1; }
-| n334 "334" { assert ($1 == 334); $$ = $1; }
-| n335 "335" { assert ($1 == 335); $$ = $1; }
-| n336 "336" { assert ($1 == 336); $$ = $1; }
-| n337 "337" { assert ($1 == 337); $$ = $1; }
-| n338 "338" { assert ($1 == 338); $$ = $1; }
-| n339 "339" { assert ($1 == 339); $$ = $1; }
-| n340 "340" { assert ($1 == 340); $$ = $1; }
-| n341 "341" { assert ($1 == 341); $$ = $1; }
-| n342 "342" { assert ($1 == 342); $$ = $1; }
-| n343 "343" { assert ($1 == 343); $$ = $1; }
-| n344 "344" { assert ($1 == 344); $$ = $1; }
-| n345 "345" { assert ($1 == 345); $$ = $1; }
-| n346 "346" { assert ($1 == 346); $$ = $1; }
-| n347 "347" { assert ($1 == 347); $$ = $1; }
-| n348 "348" { assert ($1 == 348); $$ = $1; }
-| n349 "349" { assert ($1 == 349); $$ = $1; }
-| n350 "350" { assert ($1 == 350); $$ = $1; }
-| n351 "351" { assert ($1 == 351); $$ = $1; }
-| n352 "352" { assert ($1 == 352); $$ = $1; }
-| n353 "353" { assert ($1 == 353); $$ = $1; }
-| n354 "354" { assert ($1 == 354); $$ = $1; }
-| n355 "355" { assert ($1 == 355); $$ = $1; }
-| n356 "356" { assert ($1 == 356); $$ = $1; }
-| n357 "357" { assert ($1 == 357); $$ = $1; }
-| n358 "358" { assert ($1 == 358); $$ = $1; }
-| n359 "359" { assert ($1 == 359); $$ = $1; }
-| n360 "360" { assert ($1 == 360); $$ = $1; }
-| n361 "361" { assert ($1 == 361); $$ = $1; }
-| n362 "362" { assert ($1 == 362); $$ = $1; }
-| n363 "363" { assert ($1 == 363); $$ = $1; }
-| n364 "364" { assert ($1 == 364); $$ = $1; }
-| n365 "365" { assert ($1 == 365); $$ = $1; }
-| n366 "366" { assert ($1 == 366); $$ = $1; }
-| n367 "367" { assert ($1 == 367); $$ = $1; }
-| n368 "368" { assert ($1 == 368); $$ = $1; }
-| n369 "369" { assert ($1 == 369); $$ = $1; }
-| n370 "370" { assert ($1 == 370); $$ = $1; }
-| n371 "371" { assert ($1 == 371); $$ = $1; }
-| n372 "372" { assert ($1 == 372); $$ = $1; }
-| n373 "373" { assert ($1 == 373); $$ = $1; }
-| n374 "374" { assert ($1 == 374); $$ = $1; }
-| n375 "375" { assert ($1 == 375); $$ = $1; }
-| n376 "376" { assert ($1 == 376); $$ = $1; }
-| n377 "377" { assert ($1 == 377); $$ = $1; }
-| n378 "378" { assert ($1 == 378); $$ = $1; }
-| n379 "379" { assert ($1 == 379); $$ = $1; }
-| n380 "380" { assert ($1 == 380); $$ = $1; }
-| n381 "381" { assert ($1 == 381); $$ = $1; }
-| n382 "382" { assert ($1 == 382); $$ = $1; }
-| n383 "383" { assert ($1 == 383); $$ = $1; }
-| n384 "384" { assert ($1 == 384); $$ = $1; }
-| n385 "385" { assert ($1 == 385); $$ = $1; }
-| n386 "386" { assert ($1 == 386); $$ = $1; }
-| n387 "387" { assert ($1 == 387); $$ = $1; }
-| n388 "388" { assert ($1 == 388); $$ = $1; }
-| n389 "389" { assert ($1 == 389); $$ = $1; }
-| n390 "390" { assert ($1 == 390); $$ = $1; }
-| n391 "391" { assert ($1 == 391); $$ = $1; }
-| n392 "392" { assert ($1 == 392); $$ = $1; }
-| n393 "393" { assert ($1 == 393); $$ = $1; }
-| n394 "394" { assert ($1 == 394); $$ = $1; }
-| n395 "395" { assert ($1 == 395); $$ = $1; }
-| n396 "396" { assert ($1 == 396); $$ = $1; }
-| n397 "397" { assert ($1 == 397); $$ = $1; }
-| n398 "398" { assert ($1 == 398); $$ = $1; }
-| n399 "399" { assert ($1 == 399); $$ = $1; }
-| n400 "400" { assert ($1 == 400); $$ = $1; }
-| n401 "401" { assert ($1 == 401); $$ = $1; }
-| n402 "402" { assert ($1 == 402); $$ = $1; }
-| n403 "403" { assert ($1 == 403); $$ = $1; }
-| n404 "404" { assert ($1 == 404); $$ = $1; }
-| n405 "405" { assert ($1 == 405); $$ = $1; }
-| n406 "406" { assert ($1 == 406); $$ = $1; }
-| n407 "407" { assert ($1 == 407); $$ = $1; }
-| n408 "408" { assert ($1 == 408); $$ = $1; }
-| n409 "409" { assert ($1 == 409); $$ = $1; }
-| n410 "410" { assert ($1 == 410); $$ = $1; }
-| n411 "411" { assert ($1 == 411); $$ = $1; }
-| n412 "412" { assert ($1 == 412); $$ = $1; }
-| n413 "413" { assert ($1 == 413); $$ = $1; }
-| n414 "414" { assert ($1 == 414); $$ = $1; }
-| n415 "415" { assert ($1 == 415); $$ = $1; }
-| n416 "416" { assert ($1 == 416); $$ = $1; }
-| n417 "417" { assert ($1 == 417); $$ = $1; }
-| n418 "418" { assert ($1 == 418); $$ = $1; }
-| n419 "419" { assert ($1 == 419); $$ = $1; }
-| n420 "420" { assert ($1 == 420); $$ = $1; }
-| n421 "421" { assert ($1 == 421); $$ = $1; }
-| n422 "422" { assert ($1 == 422); $$ = $1; }
-| n423 "423" { assert ($1 == 423); $$ = $1; }
-| n424 "424" { assert ($1 == 424); $$ = $1; }
-| n425 "425" { assert ($1 == 425); $$ = $1; }
-| n426 "426" { assert ($1 == 426); $$ = $1; }
-| n427 "427" { assert ($1 == 427); $$ = $1; }
-| n428 "428" { assert ($1 == 428); $$ = $1; }
-| n429 "429" { assert ($1 == 429); $$ = $1; }
-| n430 "430" { assert ($1 == 430); $$ = $1; }
-| n431 "431" { assert ($1 == 431); $$ = $1; }
-| n432 "432" { assert ($1 == 432); $$ = $1; }
-| n433 "433" { assert ($1 == 433); $$ = $1; }
-| n434 "434" { assert ($1 == 434); $$ = $1; }
-| n435 "435" { assert ($1 == 435); $$ = $1; }
-| n436 "436" { assert ($1 == 436); $$ = $1; }
-| n437 "437" { assert ($1 == 437); $$ = $1; }
-| n438 "438" { assert ($1 == 438); $$ = $1; }
-| n439 "439" { assert ($1 == 439); $$ = $1; }
-| n440 "440" { assert ($1 == 440); $$ = $1; }
-| n441 "441" { assert ($1 == 441); $$ = $1; }
-| n442 "442" { assert ($1 == 442); $$ = $1; }
-| n443 "443" { assert ($1 == 443); $$ = $1; }
-| n444 "444" { assert ($1 == 444); $$ = $1; }
-| n445 "445" { assert ($1 == 445); $$ = $1; }
-| n446 "446" { assert ($1 == 446); $$ = $1; }
-| n447 "447" { assert ($1 == 447); $$ = $1; }
-| n448 "448" { assert ($1 == 448); $$ = $1; }
-| n449 "449" { assert ($1 == 449); $$ = $1; }
-| n450 "450" { assert ($1 == 450); $$ = $1; }
-| n451 "451" { assert ($1 == 451); $$ = $1; }
-| n452 "452" { assert ($1 == 452); $$ = $1; }
-| n453 "453" { assert ($1 == 453); $$ = $1; }
-| n454 "454" { assert ($1 == 454); $$ = $1; }
-| n455 "455" { assert ($1 == 455); $$ = $1; }
-| n456 "456" { assert ($1 == 456); $$ = $1; }
-| n457 "457" { assert ($1 == 457); $$ = $1; }
-| n458 "458" { assert ($1 == 458); $$ = $1; }
-| n459 "459" { assert ($1 == 459); $$ = $1; }
-| n460 "460" { assert ($1 == 460); $$ = $1; }
-| n461 "461" { assert ($1 == 461); $$ = $1; }
-| n462 "462" { assert ($1 == 462); $$ = $1; }
-| n463 "463" { assert ($1 == 463); $$ = $1; }
-| n464 "464" { assert ($1 == 464); $$ = $1; }
-| n465 "465" { assert ($1 == 465); $$ = $1; }
-| n466 "466" { assert ($1 == 466); $$ = $1; }
-| n467 "467" { assert ($1 == 467); $$ = $1; }
-| n468 "468" { assert ($1 == 468); $$ = $1; }
-| n469 "469" { assert ($1 == 469); $$ = $1; }
-| n470 "470" { assert ($1 == 470); $$ = $1; }
-| n471 "471" { assert ($1 == 471); $$ = $1; }
-| n472 "472" { assert ($1 == 472); $$ = $1; }
-| n473 "473" { assert ($1 == 473); $$ = $1; }
-| n474 "474" { assert ($1 == 474); $$ = $1; }
-| n475 "475" { assert ($1 == 475); $$ = $1; }
-| n476 "476" { assert ($1 == 476); $$ = $1; }
-| n477 "477" { assert ($1 == 477); $$ = $1; }
-| n478 "478" { assert ($1 == 478); $$ = $1; }
-| n479 "479" { assert ($1 == 479); $$ = $1; }
-| n480 "480" { assert ($1 == 480); $$ = $1; }
-| n481 "481" { assert ($1 == 481); $$ = $1; }
-| n482 "482" { assert ($1 == 482); $$ = $1; }
-| n483 "483" { assert ($1 == 483); $$ = $1; }
-| n484 "484" { assert ($1 == 484); $$ = $1; }
-| n485 "485" { assert ($1 == 485); $$ = $1; }
-| n486 "486" { assert ($1 == 486); $$ = $1; }
-| n487 "487" { assert ($1 == 487); $$ = $1; }
-| n488 "488" { assert ($1 == 488); $$ = $1; }
-| n489 "489" { assert ($1 == 489); $$ = $1; }
-| n490 "490" { assert ($1 == 490); $$ = $1; }
-| n491 "491" { assert ($1 == 491); $$ = $1; }
-| n492 "492" { assert ($1 == 492); $$ = $1; }
-| n493 "493" { assert ($1 == 493); $$ = $1; }
-| n494 "494" { assert ($1 == 494); $$ = $1; }
-| n495 "495" { assert ($1 == 495); $$ = $1; }
-| n496 "496" { assert ($1 == 496); $$ = $1; }
-| n497 "497" { assert ($1 == 497); $$ = $1; }
-| n498 "498" { assert ($1 == 498); $$ = $1; }
-| n499 "499" { assert ($1 == 499); $$ = $1; }
-| n500 "500" { assert ($1 == 500); $$ = $1; }
-| n501 "501" { assert ($1 == 501); $$ = $1; }
-| n502 "502" { assert ($1 == 502); $$ = $1; }
-| n503 "503" { assert ($1 == 503); $$ = $1; }
-| n504 "504" { assert ($1 == 504); $$ = $1; }
-| n505 "505" { assert ($1 == 505); $$ = $1; }
-| n506 "506" { assert ($1 == 506); $$ = $1; }
-| n507 "507" { assert ($1 == 507); $$ = $1; }
-| n508 "508" { assert ($1 == 508); $$ = $1; }
-| n509 "509" { assert ($1 == 509); $$ = $1; }
-| n510 "510" { assert ($1 == 510); $$ = $1; }
-| n511 "511" { assert ($1 == 511); $$ = $1; }
-| n512 "512" { assert ($1 == 512); $$ = $1; }
-| n513 "513" { assert ($1 == 513); $$ = $1; }
-| n514 "514" { assert ($1 == 514); $$ = $1; }
-| n515 "515" { assert ($1 == 515); $$ = $1; }
-| n516 "516" { assert ($1 == 516); $$ = $1; }
-| n517 "517" { assert ($1 == 517); $$ = $1; }
-| n518 "518" { assert ($1 == 518); $$ = $1; }
-| n519 "519" { assert ($1 == 519); $$ = $1; }
-| n520 "520" { assert ($1 == 520); $$ = $1; }
-| n521 "521" { assert ($1 == 521); $$ = $1; }
-| n522 "522" { assert ($1 == 522); $$ = $1; }
-| n523 "523" { assert ($1 == 523); $$ = $1; }
-| n524 "524" { assert ($1 == 524); $$ = $1; }
-| n525 "525" { assert ($1 == 525); $$ = $1; }
-| n526 "526" { assert ($1 == 526); $$ = $1; }
-| n527 "527" { assert ($1 == 527); $$ = $1; }
-| n528 "528" { assert ($1 == 528); $$ = $1; }
-| n529 "529" { assert ($1 == 529); $$ = $1; }
-| n530 "530" { assert ($1 == 530); $$ = $1; }
-| n531 "531" { assert ($1 == 531); $$ = $1; }
-| n532 "532" { assert ($1 == 532); $$ = $1; }
-| n533 "533" { assert ($1 == 533); $$ = $1; }
-| n534 "534" { assert ($1 == 534); $$ = $1; }
-| n535 "535" { assert ($1 == 535); $$ = $1; }
-| n536 "536" { assert ($1 == 536); $$ = $1; }
-| n537 "537" { assert ($1 == 537); $$ = $1; }
-| n538 "538" { assert ($1 == 538); $$ = $1; }
-| n539 "539" { assert ($1 == 539); $$ = $1; }
-| n540 "540" { assert ($1 == 540); $$ = $1; }
-| n541 "541" { assert ($1 == 541); $$ = $1; }
-| n542 "542" { assert ($1 == 542); $$ = $1; }
-| n543 "543" { assert ($1 == 543); $$ = $1; }
-| n544 "544" { assert ($1 == 544); $$ = $1; }
-| n545 "545" { assert ($1 == 545); $$ = $1; }
-| n546 "546" { assert ($1 == 546); $$ = $1; }
-| n547 "547" { assert ($1 == 547); $$ = $1; }
-| n548 "548" { assert ($1 == 548); $$ = $1; }
-| n549 "549" { assert ($1 == 549); $$ = $1; }
-| n550 "550" { assert ($1 == 550); $$ = $1; }
-| n551 "551" { assert ($1 == 551); $$ = $1; }
-| n552 "552" { assert ($1 == 552); $$ = $1; }
-| n553 "553" { assert ($1 == 553); $$ = $1; }
-| n554 "554" { assert ($1 == 554); $$ = $1; }
-| n555 "555" { assert ($1 == 555); $$ = $1; }
-| n556 "556" { assert ($1 == 556); $$ = $1; }
-| n557 "557" { assert ($1 == 557); $$ = $1; }
-| n558 "558" { assert ($1 == 558); $$ = $1; }
-| n559 "559" { assert ($1 == 559); $$ = $1; }
-| n560 "560" { assert ($1 == 560); $$ = $1; }
-| n561 "561" { assert ($1 == 561); $$ = $1; }
-| n562 "562" { assert ($1 == 562); $$ = $1; }
-| n563 "563" { assert ($1 == 563); $$ = $1; }
-| n564 "564" { assert ($1 == 564); $$ = $1; }
-| n565 "565" { assert ($1 == 565); $$ = $1; }
-| n566 "566" { assert ($1 == 566); $$ = $1; }
-| n567 "567" { assert ($1 == 567); $$ = $1; }
-| n568 "568" { assert ($1 == 568); $$ = $1; }
-| n569 "569" { assert ($1 == 569); $$ = $1; }
-| n570 "570" { assert ($1 == 570); $$ = $1; }
-| n571 "571" { assert ($1 == 571); $$ = $1; }
-| n572 "572" { assert ($1 == 572); $$ = $1; }
-| n573 "573" { assert ($1 == 573); $$ = $1; }
-| n574 "574" { assert ($1 == 574); $$ = $1; }
-| n575 "575" { assert ($1 == 575); $$ = $1; }
-| n576 "576" { assert ($1 == 576); $$ = $1; }
-| n577 "577" { assert ($1 == 577); $$ = $1; }
-| n578 "578" { assert ($1 == 578); $$ = $1; }
-| n579 "579" { assert ($1 == 579); $$ = $1; }
-| n580 "580" { assert ($1 == 580); $$ = $1; }
-| n581 "581" { assert ($1 == 581); $$ = $1; }
-| n582 "582" { assert ($1 == 582); $$ = $1; }
-| n583 "583" { assert ($1 == 583); $$ = $1; }
-| n584 "584" { assert ($1 == 584); $$ = $1; }
-| n585 "585" { assert ($1 == 585); $$ = $1; }
-| n586 "586" { assert ($1 == 586); $$ = $1; }
-| n587 "587" { assert ($1 == 587); $$ = $1; }
-| n588 "588" { assert ($1 == 588); $$ = $1; }
-| n589 "589" { assert ($1 == 589); $$ = $1; }
-| n590 "590" { assert ($1 == 590); $$ = $1; }
-| n591 "591" { assert ($1 == 591); $$ = $1; }
-| n592 "592" { assert ($1 == 592); $$ = $1; }
-| n593 "593" { assert ($1 == 593); $$ = $1; }
-| n594 "594" { assert ($1 == 594); $$ = $1; }
-| n595 "595" { assert ($1 == 595); $$ = $1; }
-| n596 "596" { assert ($1 == 596); $$ = $1; }
-| n597 "597" { assert ($1 == 597); $$ = $1; }
-| n598 "598" { assert ($1 == 598); $$ = $1; }
-| n599 "599" { assert ($1 == 599); $$ = $1; }
-| n600 "600" { assert ($1 == 600); $$ = $1; }
-| n601 "601" { assert ($1 == 601); $$ = $1; }
-| n602 "602" { assert ($1 == 602); $$ = $1; }
-| n603 "603" { assert ($1 == 603); $$ = $1; }
-| n604 "604" { assert ($1 == 604); $$ = $1; }
-| n605 "605" { assert ($1 == 605); $$ = $1; }
-| n606 "606" { assert ($1 == 606); $$ = $1; }
-| n607 "607" { assert ($1 == 607); $$ = $1; }
-| n608 "608" { assert ($1 == 608); $$ = $1; }
-| n609 "609" { assert ($1 == 609); $$ = $1; }
-| n610 "610" { assert ($1 == 610); $$ = $1; }
-| n611 "611" { assert ($1 == 611); $$ = $1; }
-| n612 "612" { assert ($1 == 612); $$ = $1; }
-| n613 "613" { assert ($1 == 613); $$ = $1; }
-| n614 "614" { assert ($1 == 614); $$ = $1; }
-| n615 "615" { assert ($1 == 615); $$ = $1; }
-| n616 "616" { assert ($1 == 616); $$ = $1; }
-| n617 "617" { assert ($1 == 617); $$ = $1; }
-| n618 "618" { assert ($1 == 618); $$ = $1; }
-| n619 "619" { assert ($1 == 619); $$ = $1; }
-| n620 "620" { assert ($1 == 620); $$ = $1; }
-| n621 "621" { assert ($1 == 621); $$ = $1; }
-| n622 "622" { assert ($1 == 622); $$ = $1; }
-| n623 "623" { assert ($1 == 623); $$ = $1; }
-| n624 "624" { assert ($1 == 624); $$ = $1; }
-| n625 "625" { assert ($1 == 625); $$ = $1; }
-| n626 "626" { assert ($1 == 626); $$ = $1; }
-| n627 "627" { assert ($1 == 627); $$ = $1; }
-| n628 "628" { assert ($1 == 628); $$ = $1; }
-| n629 "629" { assert ($1 == 629); $$ = $1; }
-| n630 "630" { assert ($1 == 630); $$ = $1; }
-| n631 "631" { assert ($1 == 631); $$ = $1; }
-| n632 "632" { assert ($1 == 632); $$ = $1; }
-| n633 "633" { assert ($1 == 633); $$ = $1; }
-| n634 "634" { assert ($1 == 634); $$ = $1; }
-| n635 "635" { assert ($1 == 635); $$ = $1; }
-| n636 "636" { assert ($1 == 636); $$ = $1; }
-| n637 "637" { assert ($1 == 637); $$ = $1; }
-| n638 "638" { assert ($1 == 638); $$ = $1; }
-| n639 "639" { assert ($1 == 639); $$ = $1; }
-| n640 "640" { assert ($1 == 640); $$ = $1; }
-| n641 "641" { assert ($1 == 641); $$ = $1; }
-| n642 "642" { assert ($1 == 642); $$ = $1; }
-| n643 "643" { assert ($1 == 643); $$ = $1; }
-| n644 "644" { assert ($1 == 644); $$ = $1; }
-| n645 "645" { assert ($1 == 645); $$ = $1; }
-| n646 "646" { assert ($1 == 646); $$ = $1; }
-| n647 "647" { assert ($1 == 647); $$ = $1; }
-| n648 "648" { assert ($1 == 648); $$ = $1; }
-| n649 "649" { assert ($1 == 649); $$ = $1; }
-| n650 "650" { assert ($1 == 650); $$ = $1; }
-| n651 "651" { assert ($1 == 651); $$ = $1; }
-| n652 "652" { assert ($1 == 652); $$ = $1; }
-| n653 "653" { assert ($1 == 653); $$ = $1; }
-| n654 "654" { assert ($1 == 654); $$ = $1; }
-| n655 "655" { assert ($1 == 655); $$ = $1; }
-| n656 "656" { assert ($1 == 656); $$ = $1; }
-| n657 "657" { assert ($1 == 657); $$ = $1; }
-| n658 "658" { assert ($1 == 658); $$ = $1; }
-| n659 "659" { assert ($1 == 659); $$ = $1; }
-| n660 "660" { assert ($1 == 660); $$ = $1; }
-| n661 "661" { assert ($1 == 661); $$ = $1; }
-| n662 "662" { assert ($1 == 662); $$ = $1; }
-| n663 "663" { assert ($1 == 663); $$ = $1; }
-| n664 "664" { assert ($1 == 664); $$ = $1; }
-| n665 "665" { assert ($1 == 665); $$ = $1; }
-| n666 "666" { assert ($1 == 666); $$ = $1; }
-| n667 "667" { assert ($1 == 667); $$ = $1; }
-| n668 "668" { assert ($1 == 668); $$ = $1; }
-| n669 "669" { assert ($1 == 669); $$ = $1; }
-| n670 "670" { assert ($1 == 670); $$ = $1; }
-| n671 "671" { assert ($1 == 671); $$ = $1; }
-| n672 "672" { assert ($1 == 672); $$ = $1; }
-| n673 "673" { assert ($1 == 673); $$ = $1; }
-| n674 "674" { assert ($1 == 674); $$ = $1; }
-| n675 "675" { assert ($1 == 675); $$ = $1; }
-| n676 "676" { assert ($1 == 676); $$ = $1; }
-| n677 "677" { assert ($1 == 677); $$ = $1; }
-| n678 "678" { assert ($1 == 678); $$ = $1; }
-| n679 "679" { assert ($1 == 679); $$ = $1; }
-| n680 "680" { assert ($1 == 680); $$ = $1; }
-| n681 "681" { assert ($1 == 681); $$ = $1; }
-| n682 "682" { assert ($1 == 682); $$ = $1; }
-| n683 "683" { assert ($1 == 683); $$ = $1; }
-| n684 "684" { assert ($1 == 684); $$ = $1; }
-| n685 "685" { assert ($1 == 685); $$ = $1; }
-| n686 "686" { assert ($1 == 686); $$ = $1; }
-| n687 "687" { assert ($1 == 687); $$ = $1; }
-| n688 "688" { assert ($1 == 688); $$ = $1; }
-| n689 "689" { assert ($1 == 689); $$ = $1; }
-| n690 "690" { assert ($1 == 690); $$ = $1; }
-| n691 "691" { assert ($1 == 691); $$ = $1; }
-| n692 "692" { assert ($1 == 692); $$ = $1; }
-| n693 "693" { assert ($1 == 693); $$ = $1; }
-| n694 "694" { assert ($1 == 694); $$ = $1; }
-| n695 "695" { assert ($1 == 695); $$ = $1; }
-| n696 "696" { assert ($1 == 696); $$ = $1; }
-| n697 "697" { assert ($1 == 697); $$ = $1; }
-| n698 "698" { assert ($1 == 698); $$ = $1; }
-| n699 "699" { assert ($1 == 699); $$ = $1; }
-| n700 "700" { assert ($1 == 700); $$ = $1; }
-| n701 "701" { assert ($1 == 701); $$ = $1; }
-| n702 "702" { assert ($1 == 702); $$ = $1; }
-| n703 "703" { assert ($1 == 703); $$ = $1; }
-| n704 "704" { assert ($1 == 704); $$ = $1; }
-| n705 "705" { assert ($1 == 705); $$ = $1; }
-| n706 "706" { assert ($1 == 706); $$ = $1; }
-| n707 "707" { assert ($1 == 707); $$ = $1; }
-| n708 "708" { assert ($1 == 708); $$ = $1; }
-| n709 "709" { assert ($1 == 709); $$ = $1; }
-| n710 "710" { assert ($1 == 710); $$ = $1; }
-| n711 "711" { assert ($1 == 711); $$ = $1; }
-| n712 "712" { assert ($1 == 712); $$ = $1; }
-| n713 "713" { assert ($1 == 713); $$ = $1; }
-| n714 "714" { assert ($1 == 714); $$ = $1; }
-| n715 "715" { assert ($1 == 715); $$ = $1; }
-| n716 "716" { assert ($1 == 716); $$ = $1; }
-| n717 "717" { assert ($1 == 717); $$ = $1; }
-| n718 "718" { assert ($1 == 718); $$ = $1; }
-| n719 "719" { assert ($1 == 719); $$ = $1; }
-| n720 "720" { assert ($1 == 720); $$ = $1; }
-| n721 "721" { assert ($1 == 721); $$ = $1; }
-| n722 "722" { assert ($1 == 722); $$ = $1; }
-| n723 "723" { assert ($1 == 723); $$ = $1; }
-| n724 "724" { assert ($1 == 724); $$ = $1; }
-| n725 "725" { assert ($1 == 725); $$ = $1; }
-| n726 "726" { assert ($1 == 726); $$ = $1; }
-| n727 "727" { assert ($1 == 727); $$ = $1; }
-| n728 "728" { assert ($1 == 728); $$ = $1; }
-| n729 "729" { assert ($1 == 729); $$ = $1; }
-| n730 "730" { assert ($1 == 730); $$ = $1; }
-| n731 "731" { assert ($1 == 731); $$ = $1; }
-| n732 "732" { assert ($1 == 732); $$ = $1; }
-| n733 "733" { assert ($1 == 733); $$ = $1; }
-| n734 "734" { assert ($1 == 734); $$ = $1; }
-| n735 "735" { assert ($1 == 735); $$ = $1; }
-| n736 "736" { assert ($1 == 736); $$ = $1; }
-| n737 "737" { assert ($1 == 737); $$ = $1; }
-| n738 "738" { assert ($1 == 738); $$ = $1; }
-| n739 "739" { assert ($1 == 739); $$ = $1; }
-| n740 "740" { assert ($1 == 740); $$ = $1; }
-| n741 "741" { assert ($1 == 741); $$ = $1; }
-| n742 "742" { assert ($1 == 742); $$ = $1; }
-| n743 "743" { assert ($1 == 743); $$ = $1; }
-| n744 "744" { assert ($1 == 744); $$ = $1; }
-| n745 "745" { assert ($1 == 745); $$ = $1; }
-| n746 "746" { assert ($1 == 746); $$ = $1; }
-| n747 "747" { assert ($1 == 747); $$ = $1; }
-| n748 "748" { assert ($1 == 748); $$ = $1; }
-| n749 "749" { assert ($1 == 749); $$ = $1; }
-| n750 "750" { assert ($1 == 750); $$ = $1; }
-| n751 "751" { assert ($1 == 751); $$ = $1; }
-| n752 "752" { assert ($1 == 752); $$ = $1; }
-| n753 "753" { assert ($1 == 753); $$ = $1; }
-| n754 "754" { assert ($1 == 754); $$ = $1; }
-| n755 "755" { assert ($1 == 755); $$ = $1; }
-| n756 "756" { assert ($1 == 756); $$ = $1; }
-| n757 "757" { assert ($1 == 757); $$ = $1; }
-| n758 "758" { assert ($1 == 758); $$ = $1; }
-| n759 "759" { assert ($1 == 759); $$ = $1; }
-| n760 "760" { assert ($1 == 760); $$ = $1; }
-| n761 "761" { assert ($1 == 761); $$ = $1; }
-| n762 "762" { assert ($1 == 762); $$ = $1; }
-| n763 "763" { assert ($1 == 763); $$ = $1; }
-| n764 "764" { assert ($1 == 764); $$ = $1; }
-| n765 "765" { assert ($1 == 765); $$ = $1; }
-| n766 "766" { assert ($1 == 766); $$ = $1; }
-| n767 "767" { assert ($1 == 767); $$ = $1; }
-| n768 "768" { assert ($1 == 768); $$ = $1; }
-| n769 "769" { assert ($1 == 769); $$ = $1; }
-| n770 "770" { assert ($1 == 770); $$ = $1; }
-| n771 "771" { assert ($1 == 771); $$ = $1; }
-| n772 "772" { assert ($1 == 772); $$ = $1; }
-| n773 "773" { assert ($1 == 773); $$ = $1; }
-| n774 "774" { assert ($1 == 774); $$ = $1; }
-| n775 "775" { assert ($1 == 775); $$ = $1; }
-| n776 "776" { assert ($1 == 776); $$ = $1; }
-| n777 "777" { assert ($1 == 777); $$ = $1; }
-| n778 "778" { assert ($1 == 778); $$ = $1; }
-| n779 "779" { assert ($1 == 779); $$ = $1; }
-| n780 "780" { assert ($1 == 780); $$ = $1; }
-| n781 "781" { assert ($1 == 781); $$ = $1; }
-| n782 "782" { assert ($1 == 782); $$ = $1; }
-| n783 "783" { assert ($1 == 783); $$ = $1; }
-| n784 "784" { assert ($1 == 784); $$ = $1; }
-| n785 "785" { assert ($1 == 785); $$ = $1; }
-| n786 "786" { assert ($1 == 786); $$ = $1; }
-| n787 "787" { assert ($1 == 787); $$ = $1; }
-| n788 "788" { assert ($1 == 788); $$ = $1; }
-| n789 "789" { assert ($1 == 789); $$ = $1; }
-| n790 "790" { assert ($1 == 790); $$ = $1; }
-| n791 "791" { assert ($1 == 791); $$ = $1; }
-| n792 "792" { assert ($1 == 792); $$ = $1; }
-| n793 "793" { assert ($1 == 793); $$ = $1; }
-| n794 "794" { assert ($1 == 794); $$ = $1; }
-| n795 "795" { assert ($1 == 795); $$ = $1; }
-| n796 "796" { assert ($1 == 796); $$ = $1; }
-| n797 "797" { assert ($1 == 797); $$ = $1; }
-| n798 "798" { assert ($1 == 798); $$ = $1; }
-| n799 "799" { assert ($1 == 799); $$ = $1; }
-| n800 "800" { assert ($1 == 800); $$ = $1; }
-| n801 "801" { assert ($1 == 801); $$ = $1; }
-| n802 "802" { assert ($1 == 802); $$ = $1; }
-| n803 "803" { assert ($1 == 803); $$ = $1; }
-| n804 "804" { assert ($1 == 804); $$ = $1; }
-| n805 "805" { assert ($1 == 805); $$ = $1; }
-| n806 "806" { assert ($1 == 806); $$ = $1; }
-| n807 "807" { assert ($1 == 807); $$ = $1; }
-| n808 "808" { assert ($1 == 808); $$ = $1; }
-| n809 "809" { assert ($1 == 809); $$ = $1; }
-| n810 "810" { assert ($1 == 810); $$ = $1; }
-| n811 "811" { assert ($1 == 811); $$ = $1; }
-| n812 "812" { assert ($1 == 812); $$ = $1; }
-| n813 "813" { assert ($1 == 813); $$ = $1; }
-| n814 "814" { assert ($1 == 814); $$ = $1; }
-| n815 "815" { assert ($1 == 815); $$ = $1; }
-| n816 "816" { assert ($1 == 816); $$ = $1; }
-| n817 "817" { assert ($1 == 817); $$ = $1; }
-| n818 "818" { assert ($1 == 818); $$ = $1; }
-| n819 "819" { assert ($1 == 819); $$ = $1; }
-| n820 "820" { assert ($1 == 820); $$ = $1; }
-| n821 "821" { assert ($1 == 821); $$ = $1; }
-| n822 "822" { assert ($1 == 822); $$ = $1; }
-| n823 "823" { assert ($1 == 823); $$ = $1; }
-| n824 "824" { assert ($1 == 824); $$ = $1; }
-| n825 "825" { assert ($1 == 825); $$ = $1; }
-| n826 "826" { assert ($1 == 826); $$ = $1; }
-| n827 "827" { assert ($1 == 827); $$ = $1; }
-| n828 "828" { assert ($1 == 828); $$ = $1; }
-| n829 "829" { assert ($1 == 829); $$ = $1; }
-| n830 "830" { assert ($1 == 830); $$ = $1; }
-| n831 "831" { assert ($1 == 831); $$ = $1; }
-| n832 "832" { assert ($1 == 832); $$ = $1; }
-| n833 "833" { assert ($1 == 833); $$ = $1; }
-| n834 "834" { assert ($1 == 834); $$ = $1; }
-| n835 "835" { assert ($1 == 835); $$ = $1; }
-| n836 "836" { assert ($1 == 836); $$ = $1; }
-| n837 "837" { assert ($1 == 837); $$ = $1; }
-| n838 "838" { assert ($1 == 838); $$ = $1; }
-| n839 "839" { assert ($1 == 839); $$ = $1; }
-| n840 "840" { assert ($1 == 840); $$ = $1; }
-| n841 "841" { assert ($1 == 841); $$ = $1; }
-| n842 "842" { assert ($1 == 842); $$ = $1; }
-| n843 "843" { assert ($1 == 843); $$ = $1; }
-| n844 "844" { assert ($1 == 844); $$ = $1; }
-| n845 "845" { assert ($1 == 845); $$ = $1; }
-| n846 "846" { assert ($1 == 846); $$ = $1; }
-| n847 "847" { assert ($1 == 847); $$ = $1; }
-| n848 "848" { assert ($1 == 848); $$ = $1; }
-| n849 "849" { assert ($1 == 849); $$ = $1; }
-| n850 "850" { assert ($1 == 850); $$ = $1; }
-| n851 "851" { assert ($1 == 851); $$ = $1; }
-| n852 "852" { assert ($1 == 852); $$ = $1; }
-| n853 "853" { assert ($1 == 853); $$ = $1; }
-| n854 "854" { assert ($1 == 854); $$ = $1; }
-| n855 "855" { assert ($1 == 855); $$ = $1; }
-| n856 "856" { assert ($1 == 856); $$ = $1; }
-| n857 "857" { assert ($1 == 857); $$ = $1; }
-| n858 "858" { assert ($1 == 858); $$ = $1; }
-| n859 "859" { assert ($1 == 859); $$ = $1; }
-| n860 "860" { assert ($1 == 860); $$ = $1; }
-| n861 "861" { assert ($1 == 861); $$ = $1; }
-| n862 "862" { assert ($1 == 862); $$ = $1; }
-| n863 "863" { assert ($1 == 863); $$ = $1; }
-| n864 "864" { assert ($1 == 864); $$ = $1; }
-| n865 "865" { assert ($1 == 865); $$ = $1; }
-| n866 "866" { assert ($1 == 866); $$ = $1; }
-| n867 "867" { assert ($1 == 867); $$ = $1; }
-| n868 "868" { assert ($1 == 868); $$ = $1; }
-| n869 "869" { assert ($1 == 869); $$ = $1; }
-| n870 "870" { assert ($1 == 870); $$ = $1; }
-| n871 "871" { assert ($1 == 871); $$ = $1; }
-| n872 "872" { assert ($1 == 872); $$ = $1; }
-| n873 "873" { assert ($1 == 873); $$ = $1; }
-| n874 "874" { assert ($1 == 874); $$ = $1; }
-| n875 "875" { assert ($1 == 875); $$ = $1; }
-| n876 "876" { assert ($1 == 876); $$ = $1; }
-| n877 "877" { assert ($1 == 877); $$ = $1; }
-| n878 "878" { assert ($1 == 878); $$ = $1; }
-| n879 "879" { assert ($1 == 879); $$ = $1; }
-| n880 "880" { assert ($1 == 880); $$ = $1; }
-| n881 "881" { assert ($1 == 881); $$ = $1; }
-| n882 "882" { assert ($1 == 882); $$ = $1; }
-| n883 "883" { assert ($1 == 883); $$ = $1; }
-| n884 "884" { assert ($1 == 884); $$ = $1; }
-| n885 "885" { assert ($1 == 885); $$ = $1; }
-| n886 "886" { assert ($1 == 886); $$ = $1; }
-| n887 "887" { assert ($1 == 887); $$ = $1; }
-| n888 "888" { assert ($1 == 888); $$ = $1; }
-| n889 "889" { assert ($1 == 889); $$ = $1; }
-| n890 "890" { assert ($1 == 890); $$ = $1; }
-| n891 "891" { assert ($1 == 891); $$ = $1; }
-| n892 "892" { assert ($1 == 892); $$ = $1; }
-| n893 "893" { assert ($1 == 893); $$ = $1; }
-| n894 "894" { assert ($1 == 894); $$ = $1; }
-| n895 "895" { assert ($1 == 895); $$ = $1; }
-| n896 "896" { assert ($1 == 896); $$ = $1; }
-| n897 "897" { assert ($1 == 897); $$ = $1; }
-| n898 "898" { assert ($1 == 898); $$ = $1; }
-| n899 "899" { assert ($1 == 899); $$ = $1; }
-| n900 "900" { assert ($1 == 900); $$ = $1; }
-| n901 "901" { assert ($1 == 901); $$ = $1; }
-| n902 "902" { assert ($1 == 902); $$ = $1; }
-| n903 "903" { assert ($1 == 903); $$ = $1; }
-| n904 "904" { assert ($1 == 904); $$ = $1; }
-| n905 "905" { assert ($1 == 905); $$ = $1; }
-| n906 "906" { assert ($1 == 906); $$ = $1; }
-| n907 "907" { assert ($1 == 907); $$ = $1; }
-| n908 "908" { assert ($1 == 908); $$ = $1; }
-| n909 "909" { assert ($1 == 909); $$ = $1; }
-| n910 "910" { assert ($1 == 910); $$ = $1; }
-| n911 "911" { assert ($1 == 911); $$ = $1; }
-| n912 "912" { assert ($1 == 912); $$ = $1; }
-| n913 "913" { assert ($1 == 913); $$ = $1; }
-| n914 "914" { assert ($1 == 914); $$ = $1; }
-| n915 "915" { assert ($1 == 915); $$ = $1; }
-| n916 "916" { assert ($1 == 916); $$ = $1; }
-| n917 "917" { assert ($1 == 917); $$ = $1; }
-| n918 "918" { assert ($1 == 918); $$ = $1; }
-| n919 "919" { assert ($1 == 919); $$ = $1; }
-| n920 "920" { assert ($1 == 920); $$ = $1; }
-| n921 "921" { assert ($1 == 921); $$ = $1; }
-| n922 "922" { assert ($1 == 922); $$ = $1; }
-| n923 "923" { assert ($1 == 923); $$ = $1; }
-| n924 "924" { assert ($1 == 924); $$ = $1; }
-| n925 "925" { assert ($1 == 925); $$ = $1; }
-| n926 "926" { assert ($1 == 926); $$ = $1; }
-| n927 "927" { assert ($1 == 927); $$ = $1; }
-| n928 "928" { assert ($1 == 928); $$ = $1; }
-| n929 "929" { assert ($1 == 929); $$ = $1; }
-| n930 "930" { assert ($1 == 930); $$ = $1; }
-| n931 "931" { assert ($1 == 931); $$ = $1; }
-| n932 "932" { assert ($1 == 932); $$ = $1; }
-| n933 "933" { assert ($1 == 933); $$ = $1; }
-| n934 "934" { assert ($1 == 934); $$ = $1; }
-| n935 "935" { assert ($1 == 935); $$ = $1; }
-| n936 "936" { assert ($1 == 936); $$ = $1; }
-| n937 "937" { assert ($1 == 937); $$ = $1; }
-| n938 "938" { assert ($1 == 938); $$ = $1; }
-| n939 "939" { assert ($1 == 939); $$ = $1; }
-| n940 "940" { assert ($1 == 940); $$ = $1; }
-| n941 "941" { assert ($1 == 941); $$ = $1; }
-| n942 "942" { assert ($1 == 942); $$ = $1; }
-| n943 "943" { assert ($1 == 943); $$ = $1; }
-| n944 "944" { assert ($1 == 944); $$ = $1; }
-| n945 "945" { assert ($1 == 945); $$ = $1; }
-| n946 "946" { assert ($1 == 946); $$ = $1; }
-| n947 "947" { assert ($1 == 947); $$ = $1; }
-| n948 "948" { assert ($1 == 948); $$ = $1; }
-| n949 "949" { assert ($1 == 949); $$ = $1; }
-| n950 "950" { assert ($1 == 950); $$ = $1; }
-| n951 "951" { assert ($1 == 951); $$ = $1; }
-| n952 "952" { assert ($1 == 952); $$ = $1; }
-| n953 "953" { assert ($1 == 953); $$ = $1; }
-| n954 "954" { assert ($1 == 954); $$ = $1; }
-| n955 "955" { assert ($1 == 955); $$ = $1; }
-| n956 "956" { assert ($1 == 956); $$ = $1; }
-| n957 "957" { assert ($1 == 957); $$ = $1; }
-| n958 "958" { assert ($1 == 958); $$ = $1; }
-| n959 "959" { assert ($1 == 959); $$ = $1; }
-| n960 "960" { assert ($1 == 960); $$ = $1; }
-| n961 "961" { assert ($1 == 961); $$ = $1; }
-| n962 "962" { assert ($1 == 962); $$ = $1; }
-| n963 "963" { assert ($1 == 963); $$ = $1; }
-| n964 "964" { assert ($1 == 964); $$ = $1; }
-| n965 "965" { assert ($1 == 965); $$ = $1; }
-| n966 "966" { assert ($1 == 966); $$ = $1; }
-| n967 "967" { assert ($1 == 967); $$ = $1; }
-| n968 "968" { assert ($1 == 968); $$ = $1; }
-| n969 "969" { assert ($1 == 969); $$ = $1; }
-| n970 "970" { assert ($1 == 970); $$ = $1; }
-| n971 "971" { assert ($1 == 971); $$ = $1; }
-| n972 "972" { assert ($1 == 972); $$ = $1; }
-| n973 "973" { assert ($1 == 973); $$ = $1; }
-| n974 "974" { assert ($1 == 974); $$ = $1; }
-| n975 "975" { assert ($1 == 975); $$ = $1; }
-| n976 "976" { assert ($1 == 976); $$ = $1; }
-| n977 "977" { assert ($1 == 977); $$ = $1; }
-| n978 "978" { assert ($1 == 978); $$ = $1; }
-| n979 "979" { assert ($1 == 979); $$ = $1; }
-| n980 "980" { assert ($1 == 980); $$ = $1; }
-| n981 "981" { assert ($1 == 981); $$ = $1; }
-| n982 "982" { assert ($1 == 982); $$ = $1; }
-| n983 "983" { assert ($1 == 983); $$ = $1; }
-| n984 "984" { assert ($1 == 984); $$ = $1; }
-| n985 "985" { assert ($1 == 985); $$ = $1; }
-| n986 "986" { assert ($1 == 986); $$ = $1; }
-| n987 "987" { assert ($1 == 987); $$ = $1; }
-| n988 "988" { assert ($1 == 988); $$ = $1; }
-| n989 "989" { assert ($1 == 989); $$ = $1; }
-| n990 "990" { assert ($1 == 990); $$ = $1; }
-| n991 "991" { assert ($1 == 991); $$ = $1; }
-| n992 "992" { assert ($1 == 992); $$ = $1; }
-| n993 "993" { assert ($1 == 993); $$ = $1; }
-| n994 "994" { assert ($1 == 994); $$ = $1; }
-| n995 "995" { assert ($1 == 995); $$ = $1; }
-| n996 "996" { assert ($1 == 996); $$ = $1; }
-| n997 "997" { assert ($1 == 997); $$ = $1; }
-| n998 "998" { assert ($1 == 998); $$ = $1; }
-| n999 "999" { assert ($1 == 999); $$ = $1; }
-| n1000 "1000" { assert ($1 == 1000); $$ = $1; }
-;
-n1: token { $$ = 1; };
-n2: token { $$ = 2; };
-n3: token { $$ = 3; };
-n4: token { $$ = 4; };
-n5: token { $$ = 5; };
-n6: token { $$ = 6; };
-n7: token { $$ = 7; };
-n8: token { $$ = 8; };
-n9: token { $$ = 9; };
-n10: token { $$ = 10; };
-n11: token { $$ = 11; };
-n12: token { $$ = 12; };
-n13: token { $$ = 13; };
-n14: token { $$ = 14; };
-n15: token { $$ = 15; };
-n16: token { $$ = 16; };
-n17: token { $$ = 17; };
-n18: token { $$ = 18; };
-n19: token { $$ = 19; };
-n20: token { $$ = 20; };
-n21: token { $$ = 21; };
-n22: token { $$ = 22; };
-n23: token { $$ = 23; };
-n24: token { $$ = 24; };
-n25: token { $$ = 25; };
-n26: token { $$ = 26; };
-n27: token { $$ = 27; };
-n28: token { $$ = 28; };
-n29: token { $$ = 29; };
-n30: token { $$ = 30; };
-n31: token { $$ = 31; };
-n32: token { $$ = 32; };
-n33: token { $$ = 33; };
-n34: token { $$ = 34; };
-n35: token { $$ = 35; };
-n36: token { $$ = 36; };
-n37: token { $$ = 37; };
-n38: token { $$ = 38; };
-n39: token { $$ = 39; };
-n40: token { $$ = 40; };
-n41: token { $$ = 41; };
-n42: token { $$ = 42; };
-n43: token { $$ = 43; };
-n44: token { $$ = 44; };
-n45: token { $$ = 45; };
-n46: token { $$ = 46; };
-n47: token { $$ = 47; };
-n48: token { $$ = 48; };
-n49: token { $$ = 49; };
-n50: token { $$ = 50; };
-n51: token { $$ = 51; };
-n52: token { $$ = 52; };
-n53: token { $$ = 53; };
-n54: token { $$ = 54; };
-n55: token { $$ = 55; };
-n56: token { $$ = 56; };
-n57: token { $$ = 57; };
-n58: token { $$ = 58; };
-n59: token { $$ = 59; };
-n60: token { $$ = 60; };
-n61: token { $$ = 61; };
-n62: token { $$ = 62; };
-n63: token { $$ = 63; };
-n64: token { $$ = 64; };
-n65: token { $$ = 65; };
-n66: token { $$ = 66; };
-n67: token { $$ = 67; };
-n68: token { $$ = 68; };
-n69: token { $$ = 69; };
-n70: token { $$ = 70; };
-n71: token { $$ = 71; };
-n72: token { $$ = 72; };
-n73: token { $$ = 73; };
-n74: token { $$ = 74; };
-n75: token { $$ = 75; };
-n76: token { $$ = 76; };
-n77: token { $$ = 77; };
-n78: token { $$ = 78; };
-n79: token { $$ = 79; };
-n80: token { $$ = 80; };
-n81: token { $$ = 81; };
-n82: token { $$ = 82; };
-n83: token { $$ = 83; };
-n84: token { $$ = 84; };
-n85: token { $$ = 85; };
-n86: token { $$ = 86; };
-n87: token { $$ = 87; };
-n88: token { $$ = 88; };
-n89: token { $$ = 89; };
-n90: token { $$ = 90; };
-n91: token { $$ = 91; };
-n92: token { $$ = 92; };
-n93: token { $$ = 93; };
-n94: token { $$ = 94; };
-n95: token { $$ = 95; };
-n96: token { $$ = 96; };
-n97: token { $$ = 97; };
-n98: token { $$ = 98; };
-n99: token { $$ = 99; };
-n100: token { $$ = 100; };
-n101: token { $$ = 101; };
-n102: token { $$ = 102; };
-n103: token { $$ = 103; };
-n104: token { $$ = 104; };
-n105: token { $$ = 105; };
-n106: token { $$ = 106; };
-n107: token { $$ = 107; };
-n108: token { $$ = 108; };
-n109: token { $$ = 109; };
-n110: token { $$ = 110; };
-n111: token { $$ = 111; };
-n112: token { $$ = 112; };
-n113: token { $$ = 113; };
-n114: token { $$ = 114; };
-n115: token { $$ = 115; };
-n116: token { $$ = 116; };
-n117: token { $$ = 117; };
-n118: token { $$ = 118; };
-n119: token { $$ = 119; };
-n120: token { $$ = 120; };
-n121: token { $$ = 121; };
-n122: token { $$ = 122; };
-n123: token { $$ = 123; };
-n124: token { $$ = 124; };
-n125: token { $$ = 125; };
-n126: token { $$ = 126; };
-n127: token { $$ = 127; };
-n128: token { $$ = 128; };
-n129: token { $$ = 129; };
-n130: token { $$ = 130; };
-n131: token { $$ = 131; };
-n132: token { $$ = 132; };
-n133: token { $$ = 133; };
-n134: token { $$ = 134; };
-n135: token { $$ = 135; };
-n136: token { $$ = 136; };
-n137: token { $$ = 137; };
-n138: token { $$ = 138; };
-n139: token { $$ = 139; };
-n140: token { $$ = 140; };
-n141: token { $$ = 141; };
-n142: token { $$ = 142; };
-n143: token { $$ = 143; };
-n144: token { $$ = 144; };
-n145: token { $$ = 145; };
-n146: token { $$ = 146; };
-n147: token { $$ = 147; };
-n148: token { $$ = 148; };
-n149: token { $$ = 149; };
-n150: token { $$ = 150; };
-n151: token { $$ = 151; };
-n152: token { $$ = 152; };
-n153: token { $$ = 153; };
-n154: token { $$ = 154; };
-n155: token { $$ = 155; };
-n156: token { $$ = 156; };
-n157: token { $$ = 157; };
-n158: token { $$ = 158; };
-n159: token { $$ = 159; };
-n160: token { $$ = 160; };
-n161: token { $$ = 161; };
-n162: token { $$ = 162; };
-n163: token { $$ = 163; };
-n164: token { $$ = 164; };
-n165: token { $$ = 165; };
-n166: token { $$ = 166; };
-n167: token { $$ = 167; };
-n168: token { $$ = 168; };
-n169: token { $$ = 169; };
-n170: token { $$ = 170; };
-n171: token { $$ = 171; };
-n172: token { $$ = 172; };
-n173: token { $$ = 173; };
-n174: token { $$ = 174; };
-n175: token { $$ = 175; };
-n176: token { $$ = 176; };
-n177: token { $$ = 177; };
-n178: token { $$ = 178; };
-n179: token { $$ = 179; };
-n180: token { $$ = 180; };
-n181: token { $$ = 181; };
-n182: token { $$ = 182; };
-n183: token { $$ = 183; };
-n184: token { $$ = 184; };
-n185: token { $$ = 185; };
-n186: token { $$ = 186; };
-n187: token { $$ = 187; };
-n188: token { $$ = 188; };
-n189: token { $$ = 189; };
-n190: token { $$ = 190; };
-n191: token { $$ = 191; };
-n192: token { $$ = 192; };
-n193: token { $$ = 193; };
-n194: token { $$ = 194; };
-n195: token { $$ = 195; };
-n196: token { $$ = 196; };
-n197: token { $$ = 197; };
-n198: token { $$ = 198; };
-n199: token { $$ = 199; };
-n200: token { $$ = 200; };
-n201: token { $$ = 201; };
-n202: token { $$ = 202; };
-n203: token { $$ = 203; };
-n204: token { $$ = 204; };
-n205: token { $$ = 205; };
-n206: token { $$ = 206; };
-n207: token { $$ = 207; };
-n208: token { $$ = 208; };
-n209: token { $$ = 209; };
-n210: token { $$ = 210; };
-n211: token { $$ = 211; };
-n212: token { $$ = 212; };
-n213: token { $$ = 213; };
-n214: token { $$ = 214; };
-n215: token { $$ = 215; };
-n216: token { $$ = 216; };
-n217: token { $$ = 217; };
-n218: token { $$ = 218; };
-n219: token { $$ = 219; };
-n220: token { $$ = 220; };
-n221: token { $$ = 221; };
-n222: token { $$ = 222; };
-n223: token { $$ = 223; };
-n224: token { $$ = 224; };
-n225: token { $$ = 225; };
-n226: token { $$ = 226; };
-n227: token { $$ = 227; };
-n228: token { $$ = 228; };
-n229: token { $$ = 229; };
-n230: token { $$ = 230; };
-n231: token { $$ = 231; };
-n232: token { $$ = 232; };
-n233: token { $$ = 233; };
-n234: token { $$ = 234; };
-n235: token { $$ = 235; };
-n236: token { $$ = 236; };
-n237: token { $$ = 237; };
-n238: token { $$ = 238; };
-n239: token { $$ = 239; };
-n240: token { $$ = 240; };
-n241: token { $$ = 241; };
-n242: token { $$ = 242; };
-n243: token { $$ = 243; };
-n244: token { $$ = 244; };
-n245: token { $$ = 245; };
-n246: token { $$ = 246; };
-n247: token { $$ = 247; };
-n248: token { $$ = 248; };
-n249: token { $$ = 249; };
-n250: token { $$ = 250; };
-n251: token { $$ = 251; };
-n252: token { $$ = 252; };
-n253: token { $$ = 253; };
-n254: token { $$ = 254; };
-n255: token { $$ = 255; };
-n256: token { $$ = 256; };
-n257: token { $$ = 257; };
-n258: token { $$ = 258; };
-n259: token { $$ = 259; };
-n260: token { $$ = 260; };
-n261: token { $$ = 261; };
-n262: token { $$ = 262; };
-n263: token { $$ = 263; };
-n264: token { $$ = 264; };
-n265: token { $$ = 265; };
-n266: token { $$ = 266; };
-n267: token { $$ = 267; };
-n268: token { $$ = 268; };
-n269: token { $$ = 269; };
-n270: token { $$ = 270; };
-n271: token { $$ = 271; };
-n272: token { $$ = 272; };
-n273: token { $$ = 273; };
-n274: token { $$ = 274; };
-n275: token { $$ = 275; };
-n276: token { $$ = 276; };
-n277: token { $$ = 277; };
-n278: token { $$ = 278; };
-n279: token { $$ = 279; };
-n280: token { $$ = 280; };
-n281: token { $$ = 281; };
-n282: token { $$ = 282; };
-n283: token { $$ = 283; };
-n284: token { $$ = 284; };
-n285: token { $$ = 285; };
-n286: token { $$ = 286; };
-n287: token { $$ = 287; };
-n288: token { $$ = 288; };
-n289: token { $$ = 289; };
-n290: token { $$ = 290; };
-n291: token { $$ = 291; };
-n292: token { $$ = 292; };
-n293: token { $$ = 293; };
-n294: token { $$ = 294; };
-n295: token { $$ = 295; };
-n296: token { $$ = 296; };
-n297: token { $$ = 297; };
-n298: token { $$ = 298; };
-n299: token { $$ = 299; };
-n300: token { $$ = 300; };
-n301: token { $$ = 301; };
-n302: token { $$ = 302; };
-n303: token { $$ = 303; };
-n304: token { $$ = 304; };
-n305: token { $$ = 305; };
-n306: token { $$ = 306; };
-n307: token { $$ = 307; };
-n308: token { $$ = 308; };
-n309: token { $$ = 309; };
-n310: token { $$ = 310; };
-n311: token { $$ = 311; };
-n312: token { $$ = 312; };
-n313: token { $$ = 313; };
-n314: token { $$ = 314; };
-n315: token { $$ = 315; };
-n316: token { $$ = 316; };
-n317: token { $$ = 317; };
-n318: token { $$ = 318; };
-n319: token { $$ = 319; };
-n320: token { $$ = 320; };
-n321: token { $$ = 321; };
-n322: token { $$ = 322; };
-n323: token { $$ = 323; };
-n324: token { $$ = 324; };
-n325: token { $$ = 325; };
-n326: token { $$ = 326; };
-n327: token { $$ = 327; };
-n328: token { $$ = 328; };
-n329: token { $$ = 329; };
-n330: token { $$ = 330; };
-n331: token { $$ = 331; };
-n332: token { $$ = 332; };
-n333: token { $$ = 333; };
-n334: token { $$ = 334; };
-n335: token { $$ = 335; };
-n336: token { $$ = 336; };
-n337: token { $$ = 337; };
-n338: token { $$ = 338; };
-n339: token { $$ = 339; };
-n340: token { $$ = 340; };
-n341: token { $$ = 341; };
-n342: token { $$ = 342; };
-n343: token { $$ = 343; };
-n344: token { $$ = 344; };
-n345: token { $$ = 345; };
-n346: token { $$ = 346; };
-n347: token { $$ = 347; };
-n348: token { $$ = 348; };
-n349: token { $$ = 349; };
-n350: token { $$ = 350; };
-n351: token { $$ = 351; };
-n352: token { $$ = 352; };
-n353: token { $$ = 353; };
-n354: token { $$ = 354; };
-n355: token { $$ = 355; };
-n356: token { $$ = 356; };
-n357: token { $$ = 357; };
-n358: token { $$ = 358; };
-n359: token { $$ = 359; };
-n360: token { $$ = 360; };
-n361: token { $$ = 361; };
-n362: token { $$ = 362; };
-n363: token { $$ = 363; };
-n364: token { $$ = 364; };
-n365: token { $$ = 365; };
-n366: token { $$ = 366; };
-n367: token { $$ = 367; };
-n368: token { $$ = 368; };
-n369: token { $$ = 369; };
-n370: token { $$ = 370; };
-n371: token { $$ = 371; };
-n372: token { $$ = 372; };
-n373: token { $$ = 373; };
-n374: token { $$ = 374; };
-n375: token { $$ = 375; };
-n376: token { $$ = 376; };
-n377: token { $$ = 377; };
-n378: token { $$ = 378; };
-n379: token { $$ = 379; };
-n380: token { $$ = 380; };
-n381: token { $$ = 381; };
-n382: token { $$ = 382; };
-n383: token { $$ = 383; };
-n384: token { $$ = 384; };
-n385: token { $$ = 385; };
-n386: token { $$ = 386; };
-n387: token { $$ = 387; };
-n388: token { $$ = 388; };
-n389: token { $$ = 389; };
-n390: token { $$ = 390; };
-n391: token { $$ = 391; };
-n392: token { $$ = 392; };
-n393: token { $$ = 393; };
-n394: token { $$ = 394; };
-n395: token { $$ = 395; };
-n396: token { $$ = 396; };
-n397: token { $$ = 397; };
-n398: token { $$ = 398; };
-n399: token { $$ = 399; };
-n400: token { $$ = 400; };
-n401: token { $$ = 401; };
-n402: token { $$ = 402; };
-n403: token { $$ = 403; };
-n404: token { $$ = 404; };
-n405: token { $$ = 405; };
-n406: token { $$ = 406; };
-n407: token { $$ = 407; };
-n408: token { $$ = 408; };
-n409: token { $$ = 409; };
-n410: token { $$ = 410; };
-n411: token { $$ = 411; };
-n412: token { $$ = 412; };
-n413: token { $$ = 413; };
-n414: token { $$ = 414; };
-n415: token { $$ = 415; };
-n416: token { $$ = 416; };
-n417: token { $$ = 417; };
-n418: token { $$ = 418; };
-n419: token { $$ = 419; };
-n420: token { $$ = 420; };
-n421: token { $$ = 421; };
-n422: token { $$ = 422; };
-n423: token { $$ = 423; };
-n424: token { $$ = 424; };
-n425: token { $$ = 425; };
-n426: token { $$ = 426; };
-n427: token { $$ = 427; };
-n428: token { $$ = 428; };
-n429: token { $$ = 429; };
-n430: token { $$ = 430; };
-n431: token { $$ = 431; };
-n432: token { $$ = 432; };
-n433: token { $$ = 433; };
-n434: token { $$ = 434; };
-n435: token { $$ = 435; };
-n436: token { $$ = 436; };
-n437: token { $$ = 437; };
-n438: token { $$ = 438; };
-n439: token { $$ = 439; };
-n440: token { $$ = 440; };
-n441: token { $$ = 441; };
-n442: token { $$ = 442; };
-n443: token { $$ = 443; };
-n444: token { $$ = 444; };
-n445: token { $$ = 445; };
-n446: token { $$ = 446; };
-n447: token { $$ = 447; };
-n448: token { $$ = 448; };
-n449: token { $$ = 449; };
-n450: token { $$ = 450; };
-n451: token { $$ = 451; };
-n452: token { $$ = 452; };
-n453: token { $$ = 453; };
-n454: token { $$ = 454; };
-n455: token { $$ = 455; };
-n456: token { $$ = 456; };
-n457: token { $$ = 457; };
-n458: token { $$ = 458; };
-n459: token { $$ = 459; };
-n460: token { $$ = 460; };
-n461: token { $$ = 461; };
-n462: token { $$ = 462; };
-n463: token { $$ = 463; };
-n464: token { $$ = 464; };
-n465: token { $$ = 465; };
-n466: token { $$ = 466; };
-n467: token { $$ = 467; };
-n468: token { $$ = 468; };
-n469: token { $$ = 469; };
-n470: token { $$ = 470; };
-n471: token { $$ = 471; };
-n472: token { $$ = 472; };
-n473: token { $$ = 473; };
-n474: token { $$ = 474; };
-n475: token { $$ = 475; };
-n476: token { $$ = 476; };
-n477: token { $$ = 477; };
-n478: token { $$ = 478; };
-n479: token { $$ = 479; };
-n480: token { $$ = 480; };
-n481: token { $$ = 481; };
-n482: token { $$ = 482; };
-n483: token { $$ = 483; };
-n484: token { $$ = 484; };
-n485: token { $$ = 485; };
-n486: token { $$ = 486; };
-n487: token { $$ = 487; };
-n488: token { $$ = 488; };
-n489: token { $$ = 489; };
-n490: token { $$ = 490; };
-n491: token { $$ = 491; };
-n492: token { $$ = 492; };
-n493: token { $$ = 493; };
-n494: token { $$ = 494; };
-n495: token { $$ = 495; };
-n496: token { $$ = 496; };
-n497: token { $$ = 497; };
-n498: token { $$ = 498; };
-n499: token { $$ = 499; };
-n500: token { $$ = 500; };
-n501: token { $$ = 501; };
-n502: token { $$ = 502; };
-n503: token { $$ = 503; };
-n504: token { $$ = 504; };
-n505: token { $$ = 505; };
-n506: token { $$ = 506; };
-n507: token { $$ = 507; };
-n508: token { $$ = 508; };
-n509: token { $$ = 509; };
-n510: token { $$ = 510; };
-n511: token { $$ = 511; };
-n512: token { $$ = 512; };
-n513: token { $$ = 513; };
-n514: token { $$ = 514; };
-n515: token { $$ = 515; };
-n516: token { $$ = 516; };
-n517: token { $$ = 517; };
-n518: token { $$ = 518; };
-n519: token { $$ = 519; };
-n520: token { $$ = 520; };
-n521: token { $$ = 521; };
-n522: token { $$ = 522; };
-n523: token { $$ = 523; };
-n524: token { $$ = 524; };
-n525: token { $$ = 525; };
-n526: token { $$ = 526; };
-n527: token { $$ = 527; };
-n528: token { $$ = 528; };
-n529: token { $$ = 529; };
-n530: token { $$ = 530; };
-n531: token { $$ = 531; };
-n532: token { $$ = 532; };
-n533: token { $$ = 533; };
-n534: token { $$ = 534; };
-n535: token { $$ = 535; };
-n536: token { $$ = 536; };
-n537: token { $$ = 537; };
-n538: token { $$ = 538; };
-n539: token { $$ = 539; };
-n540: token { $$ = 540; };
-n541: token { $$ = 541; };
-n542: token { $$ = 542; };
-n543: token { $$ = 543; };
-n544: token { $$ = 544; };
-n545: token { $$ = 545; };
-n546: token { $$ = 546; };
-n547: token { $$ = 547; };
-n548: token { $$ = 548; };
-n549: token { $$ = 549; };
-n550: token { $$ = 550; };
-n551: token { $$ = 551; };
-n552: token { $$ = 552; };
-n553: token { $$ = 553; };
-n554: token { $$ = 554; };
-n555: token { $$ = 555; };
-n556: token { $$ = 556; };
-n557: token { $$ = 557; };
-n558: token { $$ = 558; };
-n559: token { $$ = 559; };
-n560: token { $$ = 560; };
-n561: token { $$ = 561; };
-n562: token { $$ = 562; };
-n563: token { $$ = 563; };
-n564: token { $$ = 564; };
-n565: token { $$ = 565; };
-n566: token { $$ = 566; };
-n567: token { $$ = 567; };
-n568: token { $$ = 568; };
-n569: token { $$ = 569; };
-n570: token { $$ = 570; };
-n571: token { $$ = 571; };
-n572: token { $$ = 572; };
-n573: token { $$ = 573; };
-n574: token { $$ = 574; };
-n575: token { $$ = 575; };
-n576: token { $$ = 576; };
-n577: token { $$ = 577; };
-n578: token { $$ = 578; };
-n579: token { $$ = 579; };
-n580: token { $$ = 580; };
-n581: token { $$ = 581; };
-n582: token { $$ = 582; };
-n583: token { $$ = 583; };
-n584: token { $$ = 584; };
-n585: token { $$ = 585; };
-n586: token { $$ = 586; };
-n587: token { $$ = 587; };
-n588: token { $$ = 588; };
-n589: token { $$ = 589; };
-n590: token { $$ = 590; };
-n591: token { $$ = 591; };
-n592: token { $$ = 592; };
-n593: token { $$ = 593; };
-n594: token { $$ = 594; };
-n595: token { $$ = 595; };
-n596: token { $$ = 596; };
-n597: token { $$ = 597; };
-n598: token { $$ = 598; };
-n599: token { $$ = 599; };
-n600: token { $$ = 600; };
-n601: token { $$ = 601; };
-n602: token { $$ = 602; };
-n603: token { $$ = 603; };
-n604: token { $$ = 604; };
-n605: token { $$ = 605; };
-n606: token { $$ = 606; };
-n607: token { $$ = 607; };
-n608: token { $$ = 608; };
-n609: token { $$ = 609; };
-n610: token { $$ = 610; };
-n611: token { $$ = 611; };
-n612: token { $$ = 612; };
-n613: token { $$ = 613; };
-n614: token { $$ = 614; };
-n615: token { $$ = 615; };
-n616: token { $$ = 616; };
-n617: token { $$ = 617; };
-n618: token { $$ = 618; };
-n619: token { $$ = 619; };
-n620: token { $$ = 620; };
-n621: token { $$ = 621; };
-n622: token { $$ = 622; };
-n623: token { $$ = 623; };
-n624: token { $$ = 624; };
-n625: token { $$ = 625; };
-n626: token { $$ = 626; };
-n627: token { $$ = 627; };
-n628: token { $$ = 628; };
-n629: token { $$ = 629; };
-n630: token { $$ = 630; };
-n631: token { $$ = 631; };
-n632: token { $$ = 632; };
-n633: token { $$ = 633; };
-n634: token { $$ = 634; };
-n635: token { $$ = 635; };
-n636: token { $$ = 636; };
-n637: token { $$ = 637; };
-n638: token { $$ = 638; };
-n639: token { $$ = 639; };
-n640: token { $$ = 640; };
-n641: token { $$ = 641; };
-n642: token { $$ = 642; };
-n643: token { $$ = 643; };
-n644: token { $$ = 644; };
-n645: token { $$ = 645; };
-n646: token { $$ = 646; };
-n647: token { $$ = 647; };
-n648: token { $$ = 648; };
-n649: token { $$ = 649; };
-n650: token { $$ = 650; };
-n651: token { $$ = 651; };
-n652: token { $$ = 652; };
-n653: token { $$ = 653; };
-n654: token { $$ = 654; };
-n655: token { $$ = 655; };
-n656: token { $$ = 656; };
-n657: token { $$ = 657; };
-n658: token { $$ = 658; };
-n659: token { $$ = 659; };
-n660: token { $$ = 660; };
-n661: token { $$ = 661; };
-n662: token { $$ = 662; };
-n663: token { $$ = 663; };
-n664: token { $$ = 664; };
-n665: token { $$ = 665; };
-n666: token { $$ = 666; };
-n667: token { $$ = 667; };
-n668: token { $$ = 668; };
-n669: token { $$ = 669; };
-n670: token { $$ = 670; };
-n671: token { $$ = 671; };
-n672: token { $$ = 672; };
-n673: token { $$ = 673; };
-n674: token { $$ = 674; };
-n675: token { $$ = 675; };
-n676: token { $$ = 676; };
-n677: token { $$ = 677; };
-n678: token { $$ = 678; };
-n679: token { $$ = 679; };
-n680: token { $$ = 680; };
-n681: token { $$ = 681; };
-n682: token { $$ = 682; };
-n683: token { $$ = 683; };
-n684: token { $$ = 684; };
-n685: token { $$ = 685; };
-n686: token { $$ = 686; };
-n687: token { $$ = 687; };
-n688: token { $$ = 688; };
-n689: token { $$ = 689; };
-n690: token { $$ = 690; };
-n691: token { $$ = 691; };
-n692: token { $$ = 692; };
-n693: token { $$ = 693; };
-n694: token { $$ = 694; };
-n695: token { $$ = 695; };
-n696: token { $$ = 696; };
-n697: token { $$ = 697; };
-n698: token { $$ = 698; };
-n699: token { $$ = 699; };
-n700: token { $$ = 700; };
-n701: token { $$ = 701; };
-n702: token { $$ = 702; };
-n703: token { $$ = 703; };
-n704: token { $$ = 704; };
-n705: token { $$ = 705; };
-n706: token { $$ = 706; };
-n707: token { $$ = 707; };
-n708: token { $$ = 708; };
-n709: token { $$ = 709; };
-n710: token { $$ = 710; };
-n711: token { $$ = 711; };
-n712: token { $$ = 712; };
-n713: token { $$ = 713; };
-n714: token { $$ = 714; };
-n715: token { $$ = 715; };
-n716: token { $$ = 716; };
-n717: token { $$ = 717; };
-n718: token { $$ = 718; };
-n719: token { $$ = 719; };
-n720: token { $$ = 720; };
-n721: token { $$ = 721; };
-n722: token { $$ = 722; };
-n723: token { $$ = 723; };
-n724: token { $$ = 724; };
-n725: token { $$ = 725; };
-n726: token { $$ = 726; };
-n727: token { $$ = 727; };
-n728: token { $$ = 728; };
-n729: token { $$ = 729; };
-n730: token { $$ = 730; };
-n731: token { $$ = 731; };
-n732: token { $$ = 732; };
-n733: token { $$ = 733; };
-n734: token { $$ = 734; };
-n735: token { $$ = 735; };
-n736: token { $$ = 736; };
-n737: token { $$ = 737; };
-n738: token { $$ = 738; };
-n739: token { $$ = 739; };
-n740: token { $$ = 740; };
-n741: token { $$ = 741; };
-n742: token { $$ = 742; };
-n743: token { $$ = 743; };
-n744: token { $$ = 744; };
-n745: token { $$ = 745; };
-n746: token { $$ = 746; };
-n747: token { $$ = 747; };
-n748: token { $$ = 748; };
-n749: token { $$ = 749; };
-n750: token { $$ = 750; };
-n751: token { $$ = 751; };
-n752: token { $$ = 752; };
-n753: token { $$ = 753; };
-n754: token { $$ = 754; };
-n755: token { $$ = 755; };
-n756: token { $$ = 756; };
-n757: token { $$ = 757; };
-n758: token { $$ = 758; };
-n759: token { $$ = 759; };
-n760: token { $$ = 760; };
-n761: token { $$ = 761; };
-n762: token { $$ = 762; };
-n763: token { $$ = 763; };
-n764: token { $$ = 764; };
-n765: token { $$ = 765; };
-n766: token { $$ = 766; };
-n767: token { $$ = 767; };
-n768: token { $$ = 768; };
-n769: token { $$ = 769; };
-n770: token { $$ = 770; };
-n771: token { $$ = 771; };
-n772: token { $$ = 772; };
-n773: token { $$ = 773; };
-n774: token { $$ = 774; };
-n775: token { $$ = 775; };
-n776: token { $$ = 776; };
-n777: token { $$ = 777; };
-n778: token { $$ = 778; };
-n779: token { $$ = 779; };
-n780: token { $$ = 780; };
-n781: token { $$ = 781; };
-n782: token { $$ = 782; };
-n783: token { $$ = 783; };
-n784: token { $$ = 784; };
-n785: token { $$ = 785; };
-n786: token { $$ = 786; };
-n787: token { $$ = 787; };
-n788: token { $$ = 788; };
-n789: token { $$ = 789; };
-n790: token { $$ = 790; };
-n791: token { $$ = 791; };
-n792: token { $$ = 792; };
-n793: token { $$ = 793; };
-n794: token { $$ = 794; };
-n795: token { $$ = 795; };
-n796: token { $$ = 796; };
-n797: token { $$ = 797; };
-n798: token { $$ = 798; };
-n799: token { $$ = 799; };
-n800: token { $$ = 800; };
-n801: token { $$ = 801; };
-n802: token { $$ = 802; };
-n803: token { $$ = 803; };
-n804: token { $$ = 804; };
-n805: token { $$ = 805; };
-n806: token { $$ = 806; };
-n807: token { $$ = 807; };
-n808: token { $$ = 808; };
-n809: token { $$ = 809; };
-n810: token { $$ = 810; };
-n811: token { $$ = 811; };
-n812: token { $$ = 812; };
-n813: token { $$ = 813; };
-n814: token { $$ = 814; };
-n815: token { $$ = 815; };
-n816: token { $$ = 816; };
-n817: token { $$ = 817; };
-n818: token { $$ = 818; };
-n819: token { $$ = 819; };
-n820: token { $$ = 820; };
-n821: token { $$ = 821; };
-n822: token { $$ = 822; };
-n823: token { $$ = 823; };
-n824: token { $$ = 824; };
-n825: token { $$ = 825; };
-n826: token { $$ = 826; };
-n827: token { $$ = 827; };
-n828: token { $$ = 828; };
-n829: token { $$ = 829; };
-n830: token { $$ = 830; };
-n831: token { $$ = 831; };
-n832: token { $$ = 832; };
-n833: token { $$ = 833; };
-n834: token { $$ = 834; };
-n835: token { $$ = 835; };
-n836: token { $$ = 836; };
-n837: token { $$ = 837; };
-n838: token { $$ = 838; };
-n839: token { $$ = 839; };
-n840: token { $$ = 840; };
-n841: token { $$ = 841; };
-n842: token { $$ = 842; };
-n843: token { $$ = 843; };
-n844: token { $$ = 844; };
-n845: token { $$ = 845; };
-n846: token { $$ = 846; };
-n847: token { $$ = 847; };
-n848: token { $$ = 848; };
-n849: token { $$ = 849; };
-n850: token { $$ = 850; };
-n851: token { $$ = 851; };
-n852: token { $$ = 852; };
-n853: token { $$ = 853; };
-n854: token { $$ = 854; };
-n855: token { $$ = 855; };
-n856: token { $$ = 856; };
-n857: token { $$ = 857; };
-n858: token { $$ = 858; };
-n859: token { $$ = 859; };
-n860: token { $$ = 860; };
-n861: token { $$ = 861; };
-n862: token { $$ = 862; };
-n863: token { $$ = 863; };
-n864: token { $$ = 864; };
-n865: token { $$ = 865; };
-n866: token { $$ = 866; };
-n867: token { $$ = 867; };
-n868: token { $$ = 868; };
-n869: token { $$ = 869; };
-n870: token { $$ = 870; };
-n871: token { $$ = 871; };
-n872: token { $$ = 872; };
-n873: token { $$ = 873; };
-n874: token { $$ = 874; };
-n875: token { $$ = 875; };
-n876: token { $$ = 876; };
-n877: token { $$ = 877; };
-n878: token { $$ = 878; };
-n879: token { $$ = 879; };
-n880: token { $$ = 880; };
-n881: token { $$ = 881; };
-n882: token { $$ = 882; };
-n883: token { $$ = 883; };
-n884: token { $$ = 884; };
-n885: token { $$ = 885; };
-n886: token { $$ = 886; };
-n887: token { $$ = 887; };
-n888: token { $$ = 888; };
-n889: token { $$ = 889; };
-n890: token { $$ = 890; };
-n891: token { $$ = 891; };
-n892: token { $$ = 892; };
-n893: token { $$ = 893; };
-n894: token { $$ = 894; };
-n895: token { $$ = 895; };
-n896: token { $$ = 896; };
-n897: token { $$ = 897; };
-n898: token { $$ = 898; };
-n899: token { $$ = 899; };
-n900: token { $$ = 900; };
-n901: token { $$ = 901; };
-n902: token { $$ = 902; };
-n903: token { $$ = 903; };
-n904: token { $$ = 904; };
-n905: token { $$ = 905; };
-n906: token { $$ = 906; };
-n907: token { $$ = 907; };
-n908: token { $$ = 908; };
-n909: token { $$ = 909; };
-n910: token { $$ = 910; };
-n911: token { $$ = 911; };
-n912: token { $$ = 912; };
-n913: token { $$ = 913; };
-n914: token { $$ = 914; };
-n915: token { $$ = 915; };
-n916: token { $$ = 916; };
-n917: token { $$ = 917; };
-n918: token { $$ = 918; };
-n919: token { $$ = 919; };
-n920: token { $$ = 920; };
-n921: token { $$ = 921; };
-n922: token { $$ = 922; };
-n923: token { $$ = 923; };
-n924: token { $$ = 924; };
-n925: token { $$ = 925; };
-n926: token { $$ = 926; };
-n927: token { $$ = 927; };
-n928: token { $$ = 928; };
-n929: token { $$ = 929; };
-n930: token { $$ = 930; };
-n931: token { $$ = 931; };
-n932: token { $$ = 932; };
-n933: token { $$ = 933; };
-n934: token { $$ = 934; };
-n935: token { $$ = 935; };
-n936: token { $$ = 936; };
-n937: token { $$ = 937; };
-n938: token { $$ = 938; };
-n939: token { $$ = 939; };
-n940: token { $$ = 940; };
-n941: token { $$ = 941; };
-n942: token { $$ = 942; };
-n943: token { $$ = 943; };
-n944: token { $$ = 944; };
-n945: token { $$ = 945; };
-n946: token { $$ = 946; };
-n947: token { $$ = 947; };
-n948: token { $$ = 948; };
-n949: token { $$ = 949; };
-n950: token { $$ = 950; };
-n951: token { $$ = 951; };
-n952: token { $$ = 952; };
-n953: token { $$ = 953; };
-n954: token { $$ = 954; };
-n955: token { $$ = 955; };
-n956: token { $$ = 956; };
-n957: token { $$ = 957; };
-n958: token { $$ = 958; };
-n959: token { $$ = 959; };
-n960: token { $$ = 960; };
-n961: token { $$ = 961; };
-n962: token { $$ = 962; };
-n963: token { $$ = 963; };
-n964: token { $$ = 964; };
-n965: token { $$ = 965; };
-n966: token { $$ = 966; };
-n967: token { $$ = 967; };
-n968: token { $$ = 968; };
-n969: token { $$ = 969; };
-n970: token { $$ = 970; };
-n971: token { $$ = 971; };
-n972: token { $$ = 972; };
-n973: token { $$ = 973; };
-n974: token { $$ = 974; };
-n975: token { $$ = 975; };
-n976: token { $$ = 976; };
-n977: token { $$ = 977; };
-n978: token { $$ = 978; };
-n979: token { $$ = 979; };
-n980: token { $$ = 980; };
-n981: token { $$ = 981; };
-n982: token { $$ = 982; };
-n983: token { $$ = 983; };
-n984: token { $$ = 984; };
-n985: token { $$ = 985; };
-n986: token { $$ = 986; };
-n987: token { $$ = 987; };
-n988: token { $$ = 988; };
-n989: token { $$ = 989; };
-n990: token { $$ = 990; };
-n991: token { $$ = 991; };
-n992: token { $$ = 992; };
-n993: token { $$ = 993; };
-n994: token { $$ = 994; };
-n995: token { $$ = 995; };
-n996: token { $$ = 996; };
-n997: token { $$ = 997; };
-n998: token { $$ = 998; };
-n999: token { $$ = 999; };
-n1000: token { $$ = 1000; };
+    t263 263 "263"
+    t264 264 "264"
+    t265 265 "265"
+    t266 266 "266"
+    t267 267 "267"
+    t268 268 "268"
+    t269 269 "269"
+    t270 270 "270"
+    t271 271 "271"
+    t272 272 "272"
+    t273 273 "273"
+    t274 274 "274"
+    t275 275 "275"
+    t276 276 "276"
+    t277 277 "277"
+    t278 278 "278"
+    t279 279 "279"
+    t280 280 "280"
+    t281 281 "281"
+    t282 282 "282"
+    t283 283 "283"
+    t284 284 "284"
+    t285 285 "285"
+    t286 286 "286"
+    t287 287 "287"
+    t288 288 "288"
+    t289 289 "289"
+    t290 290 "290"
+    t291 291 "291"
+    t292 292 "292"
+    t293 293 "293"
+    t294 294 "294"
+    t295 295 "295"
+    t296 296 "296"
+    t297 297 "297"
+    t298 298 "298"
+    t299 299 "299"
+    t300 300 "300"
+    t301 301 "301"
+    t302 302 "302"
+    t303 303 "303"
+    t304 304 "304"
+    t305 305 "305"
+    t306 306 "306"
+    t307 307 "307"
+    t308 308 "308"
+    t309 309 "309"
+    t310 310 "310"
+    t311 311 "311"
+    t312 312 "312"
+    t313 313 "313"
+    t314 314 "314"
+    t315 315 "315"
+    t316 316 "316"
+    t317 317 "317"
+    t318 318 "318"
+    t319 319 "319"
+    t320 320 "320"
+    t321 321 "321"
+    t322 322 "322"
+    t323 323 "323"
+    t324 324 "324"
+    t325 325 "325"
+    t326 326 "326"
+    t327 327 "327"
+    t328 328 "328"
+    t329 329 "329"
+    t330 330 "330"
+    t331 331 "331"
+    t332 332 "332"
+    t333 333 "333"
+    t334 334 "334"
+    t335 335 "335"
+    t336 336 "336"
+    t337 337 "337"
+    t338 338 "338"
+    t339 339 "339"
+    t340 340 "340"
+    t341 341 "341"
+    t342 342 "342"
+    t343 343 "343"
+    t344 344 "344"
+    t345 345 "345"
+    t346 346 "346"
+    t347 347 "347"
+    t348 348 "348"
+    t349 349 "349"
+    t350 350 "350"
+    t351 351 "351"
+    t352 352 "352"
+    t353 353 "353"
+    t354 354 "354"
+    t355 355 "355"
+    t356 356 "356"
+    t357 357 "357"
+    t358 358 "358"
+    t359 359 "359"
+    t360 360 "360"
+    t361 361 "361"
+    t362 362 "362"
+    t363 363 "363"
+    t364 364 "364"
+    t365 365 "365"
+    t366 366 "366"
+    t367 367 "367"
+    t368 368 "368"
+    t369 369 "369"
+    t370 370 "370"
+    t371 371 "371"
+    t372 372 "372"
+    t373 373 "373"
+    t374 374 "374"
+    t375 375 "375"
+    t376 376 "376"
+    t377 377 "377"
+    t378 378 "378"
+    t379 379 "379"
+    t380 380 "380"
+    t381 381 "381"
+    t382 382 "382"
+    t383 383 "383"
+    t384 384 "384"
+    t385 385 "385"
+    t386 386 "386"
+    t387 387 "387"
+    t388 388 "388"
+    t389 389 "389"
+    t390 390 "390"
+    t391 391 "391"
+    t392 392 "392"
+    t393 393 "393"
+    t394 394 "394"
+    t395 395 "395"
+    t396 396 "396"
+    t397 397 "397"
+    t398 398 "398"
+    t399 399 "399"
+    t400 400 "400"
+    t401 401 "401"
+    t402 402 "402"
+    t403 403 "403"
+    t404 404 "404"
+    t405 405 "405"
+    t406 406 "406"
+    t407 407 "407"
+    t408 408 "408"
+    t409 409 "409"
+    t410 410 "410"
+    t411 411 "411"
+    t412 412 "412"
+    t413 413 "413"
+    t414 414 "414"
+    t415 415 "415"
+    t416 416 "416"
+    t417 417 "417"
+    t418 418 "418"
+    t419 419 "419"
+    t420 420 "420"
+    t421 421 "421"
+    t422 422 "422"
+    t423 423 "423"
+    t424 424 "424"
+    t425 425 "425"
+    t426 426 "426"
+    t427 427 "427"
+    t428 428 "428"
+    t429 429 "429"
+    t430 430 "430"
+    t431 431 "431"
+    t432 432 "432"
+    t433 433 "433"
+    t434 434 "434"
+    t435 435 "435"
+    t436 436 "436"
+    t437 437 "437"
+    t438 438 "438"
+    t439 439 "439"
+    t440 440 "440"
+    t441 441 "441"
+    t442 442 "442"
+    t443 443 "443"
+    t444 444 "444"
+    t445 445 "445"
+    t446 446 "446"
+    t447 447 "447"
+    t448 448 "448"
+    t449 449 "449"
+    t450 450 "450"
+    t451 451 "451"
+    t452 452 "452"
+    t453 453 "453"
+    t454 454 "454"
+    t455 455 "455"
+    t456 456 "456"
+    t457 457 "457"
+    t458 458 "458"
+    t459 459 "459"
+    t460 460 "460"
+    t461 461 "461"
+    t462 462 "462"
+    t463 463 "463"
+    t464 464 "464"
+    t465 465 "465"
+    t466 466 "466"
+    t467 467 "467"
+    t468 468 "468"
+    t469 469 "469"
+    t470 470 "470"
+    t471 471 "471"
+    t472 472 "472"
+    t473 473 "473"
+    t474 474 "474"
+    t475 475 "475"
+    t476 476 "476"
+    t477 477 "477"
+    t478 478 "478"
+    t479 479 "479"
+    t480 480 "480"
+    t481 481 "481"
+    t482 482 "482"
+    t483 483 "483"
+    t484 484 "484"
+    t485 485 "485"
+    t486 486 "486"
+    t487 487 "487"
+    t488 488 "488"
+    t489 489 "489"
+    t490 490 "490"
+    t491 491 "491"
+    t492 492 "492"
+    t493 493 "493"
+    t494 494 "494"
+    t495 495 "495"
+    t496 496 "496"
+    t497 497 "497"
+    t498 498 "498"
+    t499 499 "499"
+    t500 500 "500"
+    t501 501 "501"
+    t502 502 "502"
+    t503 503 "503"
+    t504 504 "504"
+    t505 505 "505"
+    t506 506 "506"
+    t507 507 "507"
+    t508 508 "508"
+    t509 509 "509"
+    t510 510 "510"
+    t511 511 "511"
+    t512 512 "512"
+    t513 513 "513"
+    t514 514 "514"
+    t515 515 "515"
+    t516 516 "516"
+    t517 517 "517"
+    t518 518 "518"
+    t519 519 "519"
+    t520 520 "520"
+    t521 521 "521"
+    t522 522 "522"
+    t523 523 "523"
+    t524 524 "524"
+    t525 525 "525"
+    t526 526 "526"
+    t527 527 "527"
+    t528 528 "528"
+    t529 529 "529"
+    t530 530 "530"
+    t531 531 "531"
+    t532 532 "532"
+    t533 533 "533"
+    t534 534 "534"
+    t535 535 "535"
+    t536 536 "536"
+    t537 537 "537"
+    t538 538 "538"
+    t539 539 "539"
+    t540 540 "540"
+    t541 541 "541"
+    t542 542 "542"
+    t543 543 "543"
+    t544 544 "544"
+    t545 545 "545"
+    t546 546 "546"
+    t547 547 "547"
+    t548 548 "548"
+    t549 549 "549"
+    t550 550 "550"
+    t551 551 "551"
+    t552 552 "552"
+    t553 553 "553"
+    t554 554 "554"
+    t555 555 "555"
+    t556 556 "556"
+    t557 557 "557"
+    t558 558 "558"
+    t559 559 "559"
+    t560 560 "560"
+    t561 561 "561"
+    t562 562 "562"
+    t563 563 "563"
+    t564 564 "564"
+    t565 565 "565"
+    t566 566 "566"
+    t567 567 "567"
+    t568 568 "568"
+    t569 569 "569"
+    t570 570 "570"
+    t571 571 "571"
+    t572 572 "572"
+    t573 573 "573"
+    t574 574 "574"
+    t575 575 "575"
+    t576 576 "576"
+    t577 577 "577"
+    t578 578 "578"
+    t579 579 "579"
+    t580 580 "580"
+    t581 581 "581"
+    t582 582 "582"
+    t583 583 "583"
+    t584 584 "584"
+    t585 585 "585"
+    t586 586 "586"
+    t587 587 "587"
+    t588 588 "588"
+    t589 589 "589"
+    t590 590 "590"
+    t591 591 "591"
+    t592 592 "592"
+    t593 593 "593"
+    t594 594 "594"
+    t595 595 "595"
+    t596 596 "596"
+    t597 597 "597"
+    t598 598 "598"
+    t599 599 "599"
+    t600 600 "600"
+    t601 601 "601"
+    t602 602 "602"
+    t603 603 "603"
+    t604 604 "604"
+    t605 605 "605"
+    t606 606 "606"
+    t607 607 "607"
+    t608 608 "608"
+    t609 609 "609"
+    t610 610 "610"
+    t611 611 "611"
+    t612 612 "612"
+    t613 613 "613"
+    t614 614 "614"
+    t615 615 "615"
+    t616 616 "616"
+    t617 617 "617"
+    t618 618 "618"
+    t619 619 "619"
+    t620 620 "620"
+    t621 621 "621"
+    t622 622 "622"
+    t623 623 "623"
+    t624 624 "624"
+    t625 625 "625"
+    t626 626 "626"
+    t627 627 "627"
+    t628 628 "628"
+    t629 629 "629"
+    t630 630 "630"
+    t631 631 "631"
+    t632 632 "632"
+    t633 633 "633"
+    t634 634 "634"
+    t635 635 "635"
+    t636 636 "636"
+    t637 637 "637"
+    t638 638 "638"
+    t639 639 "639"
+    t640 640 "640"
+    t641 641 "641"
+    t642 642 "642"
+    t643 643 "643"
+    t644 644 "644"
+    t645 645 "645"
+    t646 646 "646"
+    t647 647 "647"
+    t648 648 "648"
+    t649 649 "649"
+    t650 650 "650"
+    t651 651 "651"
+    t652 652 "652"
+    t653 653 "653"
+    t654 654 "654"
+    t655 655 "655"
+    t656 656 "656"
+    t657 657 "657"
+    t658 658 "658"
+    t659 659 "659"
+    t660 660 "660"
+    t661 661 "661"
+    t662 662 "662"
+    t663 663 "663"
+    t664 664 "664"
+    t665 665 "665"
+    t666 666 "666"
+    t667 667 "667"
+    t668 668 "668"
+    t669 669 "669"
+    t670 670 "670"
+    t671 671 "671"
+    t672 672 "672"
+    t673 673 "673"
+    t674 674 "674"
+    t675 675 "675"
+    t676 676 "676"
+    t677 677 "677"
+    t678 678 "678"
+    t679 679 "679"
+    t680 680 "680"
+    t681 681 "681"
+    t682 682 "682"
+    t683 683 "683"
+    t684 684 "684"
+    t685 685 "685"
+    t686 686 "686"
+    t687 687 "687"
+    t688 688 "688"
+    t689 689 "689"
+    t690 690 "690"
+    t691 691 "691"
+    t692 692 "692"
+    t693 693 "693"
+    t694 694 "694"
+    t695 695 "695"
+    t696 696 "696"
+    t697 697 "697"
+    t698 698 "698"
+    t699 699 "699"
+    t700 700 "700"
+    t701 701 "701"
+    t702 702 "702"
+    t703 703 "703"
+    t704 704 "704"
+    t705 705 "705"
+    t706 706 "706"
+    t707 707 "707"
+    t708 708 "708"
+    t709 709 "709"
+    t710 710 "710"
+    t711 711 "711"
+    t712 712 "712"
+    t713 713 "713"
+    t714 714 "714"
+    t715 715 "715"
+    t716 716 "716"
+    t717 717 "717"
+    t718 718 "718"
+    t719 719 "719"
+    t720 720 "720"
+    t721 721 "721"
+    t722 722 "722"
+    t723 723 "723"
+    t724 724 "724"
+    t725 725 "725"
+    t726 726 "726"
+    t727 727 "727"
+    t728 728 "728"
+    t729 729 "729"
+    t730 730 "730"
+    t731 731 "731"
+    t732 732 "732"
+    t733 733 "733"
+    t734 734 "734"
+    t735 735 "735"
+    t736 736 "736"
+    t737 737 "737"
+    t738 738 "738"
+    t739 739 "739"
+    t740 740 "740"
+    t741 741 "741"
+    t742 742 "742"
+    t743 743 "743"
+    t744 744 "744"
+    t745 745 "745"
+    t746 746 "746"
+    t747 747 "747"
+    t748 748 "748"
+    t749 749 "749"
+    t750 750 "750"
+    t751 751 "751"
+    t752 752 "752"
+    t753 753 "753"
+    t754 754 "754"
+    t755 755 "755"
+    t756 756 "756"
+    t757 757 "757"
+    t758 758 "758"
+    t759 759 "759"
+    t760 760 "760"
+    t761 761 "761"
+    t762 762 "762"
+    t763 763 "763"
+    t764 764 "764"
+    t765 765 "765"
+    t766 766 "766"
+    t767 767 "767"
+    t768 768 "768"
+    t769 769 "769"
+    t770 770 "770"
+    t771 771 "771"
+    t772 772 "772"
+    t773 773 "773"
+    t774 774 "774"
+    t775 775 "775"
+    t776 776 "776"
+    t777 777 "777"
+    t778 778 "778"
+    t779 779 "779"
+    t780 780 "780"
+    t781 781 "781"
+    t782 782 "782"
+    t783 783 "783"
+    t784 784 "784"
+    t785 785 "785"
+    t786 786 "786"
+    t787 787 "787"
+    t788 788 "788"
+    t789 789 "789"
+    t790 790 "790"
+    t791 791 "791"
+    t792 792 "792"
+    t793 793 "793"
+    t794 794 "794"
+    t795 795 "795"
+    t796 796 "796"
+    t797 797 "797"
+    t798 798 "798"
+    t799 799 "799"
+    t800 800 "800"
+    t801 801 "801"
+    t802 802 "802"
+    t803 803 "803"
+    t804 804 "804"
+    t805 805 "805"
+    t806 806 "806"
+    t807 807 "807"
+    t808 808 "808"
+    t809 809 "809"
+    t810 810 "810"
+    t811 811 "811"
+    t812 812 "812"
+    t813 813 "813"
+    t814 814 "814"
+    t815 815 "815"
+    t816 816 "816"
+    t817 817 "817"
+    t818 818 "818"
+    t819 819 "819"
+    t820 820 "820"
+    t821 821 "821"
+    t822 822 "822"
+    t823 823 "823"
+    t824 824 "824"
+    t825 825 "825"
+    t826 826 "826"
+    t827 827 "827"
+    t828 828 "828"
+    t829 829 "829"
+    t830 830 "830"
+    t831 831 "831"
+    t832 832 "832"
+    t833 833 "833"
+    t834 834 "834"
+    t835 835 "835"
+    t836 836 "836"
+    t837 837 "837"
+    t838 838 "838"
+    t839 839 "839"
+    t840 840 "840"
+    t841 841 "841"
+    t842 842 "842"
+    t843 843 "843"
+    t844 844 "844"
+    t845 845 "845"
+    t846 846 "846"
+    t847 847 "847"
+    t848 848 "848"
+    t849 849 "849"
+    t850 850 "850"
+    t851 851 "851"
+    t852 852 "852"
+    t853 853 "853"
+    t854 854 "854"
+    t855 855 "855"
+    t856 856 "856"
+    t857 857 "857"
+    t858 858 "858"
+    t859 859 "859"
+    t860 860 "860"
+    t861 861 "861"
+    t862 862 "862"
+    t863 863 "863"
+    t864 864 "864"
+    t865 865 "865"
+    t866 866 "866"
+    t867 867 "867"
+    t868 868 "868"
+    t869 869 "869"
+    t870 870 "870"
+    t871 871 "871"
+    t872 872 "872"
+    t873 873 "873"
+    t874 874 "874"
+    t875 875 "875"
+    t876 876 "876"
+    t877 877 "877"
+    t878 878 "878"
+    t879 879 "879"
+    t880 880 "880"
+    t881 881 "881"
+    t882 882 "882"
+    t883 883 "883"
+    t884 884 "884"
+    t885 885 "885"
+    t886 886 "886"
+    t887 887 "887"
+    t888 888 "888"
+    t889 889 "889"
+    t890 890 "890"
+    t891 891 "891"
+    t892 892 "892"
+    t893 893 "893"
+    t894 894 "894"
+    t895 895 "895"
+    t896 896 "896"
+    t897 897 "897"
+    t898 898 "898"
+    t899 899 "899"
+    t900 900 "900"
+    t901 901 "901"
+    t902 902 "902"
+    t903 903 "903"
+    t904 904 "904"
+    t905 905 "905"
+    t906 906 "906"
+    t907 907 "907"
+    t908 908 "908"
+    t909 909 "909"
+    t910 910 "910"
+    t911 911 "911"
+    t912 912 "912"
+    t913 913 "913"
+    t914 914 "914"
+    t915 915 "915"
+    t916 916 "916"
+    t917 917 "917"
+    t918 918 "918"
+    t919 919 "919"
+    t920 920 "920"
+    t921 921 "921"
+    t922 922 "922"
+    t923 923 "923"
+    t924 924 "924"
+    t925 925 "925"
+    t926 926 "926"
+    t927 927 "927"
+    t928 928 "928"
+    t929 929 "929"
+    t930 930 "930"
+    t931 931 "931"
+    t932 932 "932"
+    t933 933 "933"
+    t934 934 "934"
+    t935 935 "935"
+    t936 936 "936"
+    t937 937 "937"
+    t938 938 "938"
+    t939 939 "939"
+    t940 940 "940"
+    t941 941 "941"
+    t942 942 "942"
+    t943 943 "943"
+    t944 944 "944"
+    t945 945 "945"
+    t946 946 "946"
+    t947 947 "947"
+    t948 948 "948"
+    t949 949 "949"
+    t950 950 "950"
+    t951 951 "951"
+    t952 952 "952"
+    t953 953 "953"
+    t954 954 "954"
+    t955 955 "955"
+    t956 956 "956"
+    t957 957 "957"
+    t958 958 "958"
+    t959 959 "959"
+    t960 960 "960"
+    t961 961 "961"
+    t962 962 "962"
+    t963 963 "963"
+    t964 964 "964"
+    t965 965 "965"
+    t966 966 "966"
+    t967 967 "967"
+    t968 968 "968"
+    t969 969 "969"
+    t970 970 "970"
+    t971 971 "971"
+    t972 972 "972"
+    t973 973 "973"
+    t974 974 "974"
+    t975 975 "975"
+    t976 976 "976"
+    t977 977 "977"
+    t978 978 "978"
+    t979 979 "979"
+    t980 980 "980"
+    t981 981 "981"
+    t982 982 "982"
+    t983 983 "983"
+    t984 984 "984"
+    t985 985 "985"
+    t986 986 "986"
+    t987 987 "987"
+    t988 988 "988"
+    t989 989 "989"
+    t990 990 "990"
+    t991 991 "991"
+    t992 992 "992"
+    t993 993 "993"
+    t994 994 "994"
+    t995 995 "995"
+    t996 996 "996"
+    t997 997 "997"
+    t998 998 "998"
+    t999 999 "999"
+    t1000 1000 "1000"
+
+%%
+exp: "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+  "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+  "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+  "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+  "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+  "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+  "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+  "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+  "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+  "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+  "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+  "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+  "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+  "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+  "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
+  "197" "198" "199" "200" "201" "202" "203" "204" "205" "206" "207" "208"
+  "209" "210" "211" "212" "213" "214" "215" "216" "217" "218" "219" "220"
+  "221" "222" "223" "224" "225" "226" "227" "228" "229" "230" "231" "232"
+  "233" "234" "235" "236" "237" "238" "239" "240" "241" "242" "243" "244"
+  "245" "246" "247" "248" "249" "250" "251" "252" "253" "254" "255" "256"
+  "257" "258" "259" "260" "261" "262" "263" "264" "265" "266" "267" "268"
+  "269" "270" "271" "272" "273" "274" "275" "276" "277" "278" "279" "280"
+  "281" "282" "283" "284" "285" "286" "287" "288" "289" "290" "291" "292"
+  "293" "294" "295" "296" "297" "298" "299" "300" "301" "302" "303" "304"
+  "305" "306" "307" "308" "309" "310" "311" "312" "313" "314" "315" "316"
+  "317" "318" "319" "320" "321" "322" "323" "324" "325" "326" "327" "328"
+  "329" "330" "331" "332" "333" "334" "335" "336" "337" "338" "339" "340"
+  "341" "342" "343" "344" "345" "346" "347" "348" "349" "350" "351" "352"
+  "353" "354" "355" "356" "357" "358" "359" "360" "361" "362" "363" "364"
+  "365" "366" "367" "368" "369" "370" "371" "372" "373" "374" "375" "376"
+  "377" "378" "379" "380" "381" "382" "383" "384" "385" "386" "387" "388"
+  "389" "390" "391" "392" "393" "394" "395" "396" "397" "398" "399" "400"
+  "401" "402" "403" "404" "405" "406" "407" "408" "409" "410" "411" "412"
+  "413" "414" "415" "416" "417" "418" "419" "420" "421" "422" "423" "424"
+  "425" "426" "427" "428" "429" "430" "431" "432" "433" "434" "435" "436"
+  "437" "438" "439" "440" "441" "442" "443" "444" "445" "446" "447" "448"
+  "449" "450" "451" "452" "453" "454" "455" "456" "457" "458" "459" "460"
+  "461" "462" "463" "464" "465" "466" "467" "468" "469" "470" "471" "472"
+  "473" "474" "475" "476" "477" "478" "479" "480" "481" "482" "483" "484"
+  "485" "486" "487" "488" "489" "490" "491" "492" "493" "494" "495" "496"
+  "497" "498" "499" "500" "501" "502" "503" "504" "505" "506" "507" "508"
+  "509" "510" "511" "512" "513" "514" "515" "516" "517" "518" "519" "520"
+  "521" "522" "523" "524" "525" "526" "527" "528" "529" "530" "531" "532"
+  "533" "534" "535" "536" "537" "538" "539" "540" "541" "542" "543" "544"
+  "545" "546" "547" "548" "549" "550" "551" "552" "553" "554" "555" "556"
+  "557" "558" "559" "560" "561" "562" "563" "564" "565" "566" "567" "568"
+  "569" "570" "571" "572" "573" "574" "575" "576" "577" "578" "579" "580"
+  "581" "582" "583" "584" "585" "586" "587" "588" "589" "590" "591" "592"
+  "593" "594" "595" "596" "597" "598" "599" "600" "601" "602" "603" "604"
+  "605" "606" "607" "608" "609" "610" "611" "612" "613" "614" "615" "616"
+  "617" "618" "619" "620" "621" "622" "623" "624" "625" "626" "627" "628"
+  "629" "630" "631" "632" "633" "634" "635" "636" "637" "638" "639" "640"
+  "641" "642" "643" "644" "645" "646" "647" "648" "649" "650" "651" "652"
+  "653" "654" "655" "656" "657" "658" "659" "660" "661" "662" "663" "664"
+  "665" "666" "667" "668" "669" "670" "671" "672" "673" "674" "675" "676"
+  "677" "678" "679" "680" "681" "682" "683" "684" "685" "686" "687" "688"
+  "689" "690" "691" "692" "693" "694" "695" "696" "697" "698" "699" "700"
+  "701" "702" "703" "704" "705" "706" "707" "708" "709" "710" "711" "712"
+  "713" "714" "715" "716" "717" "718" "719" "720" "721" "722" "723" "724"
+  "725" "726" "727" "728" "729" "730" "731" "732" "733" "734" "735" "736"
+  "737" "738" "739" "740" "741" "742" "743" "744" "745" "746" "747" "748"
+  "749" "750" "751" "752" "753" "754" "755" "756" "757" "758" "759" "760"
+  "761" "762" "763" "764" "765" "766" "767" "768" "769" "770" "771" "772"
+  "773" "774" "775" "776" "777" "778" "779" "780" "781" "782" "783" "784"
+  "785" "786" "787" "788" "789" "790" "791" "792" "793" "794" "795" "796"
+  "797" "798" "799" "800" "801" "802" "803" "804" "805" "806" "807" "808"
+  "809" "810" "811" "812" "813" "814" "815" "816" "817" "818" "819" "820"
+  "821" "822" "823" "824" "825" "826" "827" "828" "829" "830" "831" "832"
+  "833" "834" "835" "836" "837" "838" "839" "840" "841" "842" "843" "844"
+  "845" "846" "847" "848" "849" "850" "851" "852" "853" "854" "855" "856"
+  "857" "858" "859" "860" "861" "862" "863" "864" "865" "866" "867" "868"
+  "869" "870" "871" "872" "873" "874" "875" "876" "877" "878" "879" "880"
+  "881" "882" "883" "884" "885" "886" "887" "888" "889" "890" "891" "892"
+  "893" "894" "895" "896" "897" "898" "899" "900" "901" "902" "903" "904"
+  "905" "906" "907" "908" "909" "910" "911" "912" "913" "914" "915" "916"
+  "917" "918" "919" "920" "921" "922" "923" "924" "925" "926" "927" "928"
+  "929" "930" "931" "932" "933" "934" "935" "936" "937" "938" "939" "940"
+  "941" "942" "943" "944" "945" "946" "947" "948" "949" "950" "951" "952"
+  "953" "954" "955" "956" "957" "958" "959" "960" "961" "962" "963" "964"
+  "965" "966" "967" "968" "969" "970" "971" "972" "973" "974" "975" "976"
+  "977" "978" "979" "980" "981" "982" "983" "984" "985" "986" "987" "988"
+  "989" "990" "991" "992" "993" "994" "995" "996" "997" "998" "999" "1000"
+  ;
 %%
+#include <assert.h>
 
 
 
@@ -229548,22 +223215,12 @@
 static int
 yylex (void)
 {
-  static int return_token = 1;
   static int counter = 1;
-  if (counter > MAX)
-    {
-      assert (counter++ == MAX + 1);
-      return 0;
-    }
-  if (return_token)
-    {
-      return_token = 0;
-      return token;
-    }
-  return_token = 1;
-  return counter++;
+  if (counter <= MAX)
+    return counter++;
+  assert (counter++ == MAX + 1);
+  return 0;
 }
-
 #include <stdlib.h> /* getenv. */
 #include <string.h> /* strcmp. */
 int
@@ -229573,31 +223230,2304 @@
   (void) argv;
   return yyparse ();
 }
-./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
-./torture.at:494: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1492: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stdout:
-./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20
-stderr:
-./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900
-stderr:
-./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000
-stderr:
-memory exhausted
-memory exhausted
-./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+%code top { /* -*- c -*- */
+/* Adjust to the compiler.
+  We used to do it here, but each time we add a new line,
+  we have to adjust all the line numbers in error messages.
+  It's simpler to use a constant include to a varying file.  */
+#include <testsuite.h>
+}
+
+%define parse.error verbose
+%debug
+%{
+#include <stdio.h>
+#include <stdlib.h>
+#include <assert.h>
+#define MAX 200
+static int yylex (void);
+#include <stdio.h>
+
+/* !POSIX */ static void yyerror (const char *msg);
+%}
+%union
+{
+  int val;
+};
+
+%token END "end"
+%type <val> exp input
+%token t1 1 "1"
+%token t2 2 "2"
+%token t3 3 "3"
+%token t4 4 "4"
+%token t5 5 "5"
+%token t6 6 "6"
+%token t7 7 "7"
+%token t8 8 "8"
+%token t9 9 "9"
+%token t10 10 "10"
+%token t11 11 "11"
+%token t12 12 "12"
+%token t13 13 "13"
+%token t14 14 "14"
+%token t15 15 "15"
+%token t16 16 "16"
+%token t17 17 "17"
+%token t18 18 "18"
+%token t19 19 "19"
+%token t20 20 "20"
+%token t21 21 "21"
+%token t22 22 "22"
+%token t23 23 "23"
+%token t24 24 "24"
+%token t25 25 "25"
+%token t26 26 "26"
+%token t27 27 "27"
+%token t28 28 "28"
+%token t29 29 "29"
+%token t30 30 "30"
+%token t31 31 "31"
+%token t32 32 "32"
+%token t33 33 "33"
+%token t34 34 "34"
+%token t35 35 "35"
+%token t36 36 "36"
+%token t37 37 "37"
+%token t38 38 "38"
+%token t39 39 "39"
+%token t40 40 "40"
+%token t41 41 "41"
+%token t42 42 "42"
+%token t43 43 "43"
+%token t44 44 "44"
+%token t45 45 "45"
+%token t46 46 "46"
+%token t47 47 "47"
+%token t48 48 "48"
+%token t49 49 "49"
+%token t50 50 "50"
+%token t51 51 "51"
+%token t52 52 "52"
+%token t53 53 "53"
+%token t54 54 "54"
+%token t55 55 "55"
+%token t56 56 "56"
+%token t57 57 "57"
+%token t58 58 "58"
+%token t59 59 "59"
+%token t60 60 "60"
+%token t61 61 "61"
+%token t62 62 "62"
+%token t63 63 "63"
+%token t64 64 "64"
+%token t65 65 "65"
+%token t66 66 "66"
+%token t67 67 "67"
+%token t68 68 "68"
+%token t69 69 "69"
+%token t70 70 "70"
+%token t71 71 "71"
+%token t72 72 "72"
+%token t73 73 "73"
+%token t74 74 "74"
+%token t75 75 "75"
+%token t76 76 "76"
+%token t77 77 "77"
+%token t78 78 "78"
+%token t79 79 "79"
+%token t80 80 "80"
+%token t81 81 "81"
+%token t82 82 "82"
+%token t83 83 "83"
+%token t84 84 "84"
+%token t85 85 "85"
+%token t86 86 "86"
+%token t87 87 "87"
+%token t88 88 "88"
+%token t89 89 "89"
+%token t90 90 "90"
+%token t91 91 "91"
+%token t92 92 "92"
+%token t93 93 "93"
+%token t94 94 "94"
+%token t95 95 "95"
+%token t96 96 "96"
+%token t97 97 "97"
+%token t98 98 "98"
+%token t99 99 "99"
+%token t100 100 "100"
+%token t101 101 "101"
+%token t102 102 "102"
+%token t103 103 "103"
+%token t104 104 "104"
+%token t105 105 "105"
+%token t106 106 "106"
+%token t107 107 "107"
+%token t108 108 "108"
+%token t109 109 "109"
+%token t110 110 "110"
+%token t111 111 "111"
+%token t112 112 "112"
+%token t113 113 "113"
+%token t114 114 "114"
+%token t115 115 "115"
+%token t116 116 "116"
+%token t117 117 "117"
+%token t118 118 "118"
+%token t119 119 "119"
+%token t120 120 "120"
+%token t121 121 "121"
+%token t122 122 "122"
+%token t123 123 "123"
+%token t124 124 "124"
+%token t125 125 "125"
+%token t126 126 "126"
+%token t127 127 "127"
+%token t128 128 "128"
+%token t129 129 "129"
+%token t130 130 "130"
+%token t131 131 "131"
+%token t132 132 "132"
+%token t133 133 "133"
+%token t134 134 "134"
+%token t135 135 "135"
+%token t136 136 "136"
+%token t137 137 "137"
+%token t138 138 "138"
+%token t139 139 "139"
+%token t140 140 "140"
+%token t141 141 "141"
+%token t142 142 "142"
+%token t143 143 "143"
+%token t144 144 "144"
+%token t145 145 "145"
+%token t146 146 "146"
+%token t147 147 "147"
+%token t148 148 "148"
+%token t149 149 "149"
+%token t150 150 "150"
+%token t151 151 "151"
+%token t152 152 "152"
+%token t153 153 "153"
+%token t154 154 "154"
+%token t155 155 "155"
+%token t156 156 "156"
+%token t157 157 "157"
+%token t158 158 "158"
+%token t159 159 "159"
+%token t160 160 "160"
+%token t161 161 "161"
+%token t162 162 "162"
+%token t163 163 "163"
+%token t164 164 "164"
+%token t165 165 "165"
+%token t166 166 "166"
+%token t167 167 "167"
+%token t168 168 "168"
+%token t169 169 "169"
+%token t170 170 "170"
+%token t171 171 "171"
+%token t172 172 "172"
+%token t173 173 "173"
+%token t174 174 "174"
+%token t175 175 "175"
+%token t176 176 "176"
+%token t177 177 "177"
+%token t178 178 "178"
+%token t179 179 "179"
+%token t180 180 "180"
+%token t181 181 "181"
+%token t182 182 "182"
+%token t183 183 "183"
+%token t184 184 "184"
+%token t185 185 "185"
+%token t186 186 "186"
+%token t187 187 "187"
+%token t188 188 "188"
+%token t189 189 "189"
+%token t190 190 "190"
+%token t191 191 "191"
+%token t192 192 "192"
+%token t193 193 "193"
+%token t194 194 "194"
+%token t195 195 "195"
+%token t196 196 "196"
+%token t197 197 "197"
+%token t198 198 "198"
+%token t199 199 "199"
+%token t200 200 "200"
+%%
+input:
+  exp        { assert ($1 == 0); $$ = $1; }
+| input exp  { assert ($2 == $1 + 1); $$ = $2; }
+;
+
+exp:
+  END
+    { $$ = 0; }
+| "1"  END 
+    { $$ = 1; }
+| "1" "2"  END 
+    { $$ = 2; }
+| "1" "2" "3"  END 
+    { $$ = 3; }
+| "1" "2" "3" "4"  END 
+    { $$ = 4; }
+| "1" "2" "3" "4" "5"  END 
+    { $$ = 5; }
+| "1" "2" "3" "4" "5" "6"  END 
+    { $$ = 6; }
+| "1" "2" "3" "4" "5" "6" "7"  END 
+    { $$ = 7; }
+| "1" "2" "3" "4" "5" "6" "7" "8"  END 
+    { $$ = 8; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9"  END 
+    { $$ = 9; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10"  END 
+    { $$ = 10; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11"  END 
+    { $$ = 11; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12"  END 
+    { $$ = 12; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13"  END 
+    { $$ = 13; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14"	END 
+    { $$ = 14; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15"  END 
+    { $$ = 15; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" 
+   END 
+    { $$ = 16; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17"  END 
+    { $$ = 17; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18"  END 
+    { $$ = 18; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19"  END 
+    { $$ = 19; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20"	END 
+    { $$ = 20; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21"  END 
+    { $$ = 21; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22"  END 
+    { $$ = 22; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23"  END 
+    { $$ = 23; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24"  END 
+    { $$ = 24; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25"  END 
+    { $$ = 25; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26"  END 
+    { $$ = 26; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27"  END 
+    { $$ = 27; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28"	END 
+    { $$ = 28; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29"  END 
+    { $$ = 29; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" 
+   END 
+    { $$ = 30; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31"  END 
+    { $$ = 31; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32"  END 
+    { $$ = 32; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33"  END 
+    { $$ = 33; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34"	END 
+    { $$ = 34; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35"  END 
+    { $$ = 35; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36"  END 
+    { $$ = 36; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37"  END 
+    { $$ = 37; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38"  END 
+    { $$ = 38; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39"  END 
+    { $$ = 39; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40"  END 
+    { $$ = 40; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41"  END 
+    { $$ = 41; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42"	END 
+    { $$ = 42; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43"  END 
+    { $$ = 43; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" 
+   END 
+    { $$ = 44; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45"  END 
+    { $$ = 45; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46"  END 
+    { $$ = 46; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47"  END 
+    { $$ = 47; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48"	END 
+    { $$ = 48; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49"  END 
+    { $$ = 49; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50"  END 
+    { $$ = 50; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51"  END 
+    { $$ = 51; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52"  END 
+    { $$ = 52; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53"  END 
+    { $$ = 53; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54"  END 
+    { $$ = 54; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55"  END 
+    { $$ = 55; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56"	END 
+    { $$ = 56; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57"  END 
+    { $$ = 57; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" 
+   END 
+    { $$ = 58; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59"  END 
+    { $$ = 59; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60"  END 
+    { $$ = 60; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61"  END 
+    { $$ = 61; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62"	END 
+    { $$ = 62; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63"  END 
+    { $$ = 63; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64"  END 
+    { $$ = 64; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65"  END 
+    { $$ = 65; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66"  END 
+    { $$ = 66; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67"  END 
+    { $$ = 67; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68"  END 
+    { $$ = 68; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69"  END 
+    { $$ = 69; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70"	END 
+    { $$ = 70; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71"  END 
+    { $$ = 71; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" 
+   END 
+    { $$ = 72; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73"  END 
+    { $$ = 73; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74"  END 
+    { $$ = 74; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75"  END 
+    { $$ = 75; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76"	END 
+    { $$ = 76; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77"  END 
+    { $$ = 77; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78"  END 
+    { $$ = 78; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79"  END 
+    { $$ = 79; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80"  END 
+    { $$ = 80; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81"  END 
+    { $$ = 81; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82"  END 
+    { $$ = 82; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83"  END 
+    { $$ = 83; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84"	END 
+    { $$ = 84; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85"  END 
+    { $$ = 85; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" 
+   END 
+    { $$ = 86; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87"  END 
+    { $$ = 87; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88"  END 
+    { $$ = 88; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89"  END 
+    { $$ = 89; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90"	END 
+    { $$ = 90; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91"  END 
+    { $$ = 91; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92"  END 
+    { $$ = 92; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93"  END 
+    { $$ = 93; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94"  END 
+    { $$ = 94; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95"  END 
+    { $$ = 95; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96"  END 
+    { $$ = 96; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97"  END 
+    { $$ = 97; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98"	END 
+    { $$ = 98; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99"  END 
+    { $$ = 99; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" 
+   END 
+    { $$ = 100; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101"  END 
+    { $$ = 101; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102"	END 
+    { $$ = 102; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103"  END 
+    { $$ = 103; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104"  END 
+    { $$ = 104; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105"  END 
+    { $$ = 105; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106"	END 
+    { $$ = 106; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107"  END 
+    { $$ = 107; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108"  END 
+    { $$ = 108; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109"  END 
+    { $$ = 109; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110"	END 
+    { $$ = 110; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111"  END 
+    { $$ = 111; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" 
+   END 
+    { $$ = 112; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113"  END 
+    { $$ = 113; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114"	END 
+    { $$ = 114; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115"  END 
+    { $$ = 115; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116"  END 
+    { $$ = 116; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117"  END 
+    { $$ = 117; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118"	END 
+    { $$ = 118; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119"  END 
+    { $$ = 119; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120"  END 
+    { $$ = 120; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121"  END 
+    { $$ = 121; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122"	END 
+    { $$ = 122; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123"  END 
+    { $$ = 123; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" 
+   END 
+    { $$ = 124; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125"  END 
+    { $$ = 125; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126"	END 
+    { $$ = 126; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127"  END 
+    { $$ = 127; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128"  END 
+    { $$ = 128; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129"  END 
+    { $$ = 129; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130"	END 
+    { $$ = 130; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131"  END 
+    { $$ = 131; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132"  END 
+    { $$ = 132; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133"  END 
+    { $$ = 133; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134"	END 
+    { $$ = 134; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135"  END 
+    { $$ = 135; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" 
+   END 
+    { $$ = 136; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137"  END 
+    { $$ = 137; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138"	END 
+    { $$ = 138; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139"  END 
+    { $$ = 139; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140"  END 
+    { $$ = 140; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141"  END 
+    { $$ = 141; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142"	END 
+    { $$ = 142; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143"  END 
+    { $$ = 143; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144"  END 
+    { $$ = 144; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145"  END 
+    { $$ = 145; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146"	END 
+    { $$ = 146; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147"  END 
+    { $$ = 147; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" 
+   END 
+    { $$ = 148; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149"  END 
+    { $$ = 149; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150"	END 
+    { $$ = 150; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151"  END 
+    { $$ = 151; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152"  END 
+    { $$ = 152; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153"  END 
+    { $$ = 153; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154"	END 
+    { $$ = 154; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155"  END 
+    { $$ = 155; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156"  END 
+    { $$ = 156; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157"  END 
+    { $$ = 157; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158"	END 
+    { $$ = 158; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159"  END 
+    { $$ = 159; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" 
+   END 
+    { $$ = 160; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161"  END 
+    { $$ = 161; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162"	END 
+    { $$ = 162; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163"  END 
+    { $$ = 163; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164"  END 
+    { $$ = 164; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165"  END 
+    { $$ = 165; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166"	END 
+    { $$ = 166; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167"  END 
+    { $$ = 167; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168"  END 
+    { $$ = 168; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169"  END 
+    { $$ = 169; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170"	END 
+    { $$ = 170; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171"  END 
+    { $$ = 171; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" 
+   END 
+    { $$ = 172; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173"  END 
+    { $$ = 173; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174"	END 
+    { $$ = 174; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175"  END 
+    { $$ = 175; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176"  END 
+    { $$ = 176; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177"  END 
+    { $$ = 177; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178"	END 
+    { $$ = 178; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179"  END 
+    { $$ = 179; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180"  END 
+    { $$ = 180; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181"  END 
+    { $$ = 181; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182"	END 
+    { $$ = 182; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183"  END 
+    { $$ = 183; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" 
+   END 
+    { $$ = 184; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185"  END 
+    { $$ = 185; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186"	END 
+    { $$ = 186; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187"  END 
+    { $$ = 187; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188"  END 
+    { $$ = 188; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189"  END 
+    { $$ = 189; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190"	END 
+    { $$ = 190; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191"  END 
+    { $$ = 191; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192"  END 
+    { $$ = 192; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192" "193"  END 
+    { $$ = 193; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194"	END 
+    { $$ = 194; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195"  END 
+    { $$ = 195; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" 
+   END 
+    { $$ = 196; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
+   "197"  END 
+    { $$ = 197; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
+   "197" "198"	END 
+    { $$ = 198; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
+   "197" "198" "199"  END 
+    { $$ = 199; }
+| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16"
+   "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30"
+   "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44"
+   "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58"
+   "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72"
+   "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86"
+   "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100"
+   "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112"
+   "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124"
+   "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136"
+   "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148"
+   "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160"
+   "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172"
+   "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184"
+   "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196"
+   "197" "198" "199" "200"  END 
+    { $$ = 200; }
+;
+%%
+
+
+
+
+/* A C error reporting function.  */
+/* !POSIX */ static
+void yyerror (const char *msg)
+{
+  fprintf (stderr, "%s\n", msg);
+}
+static int
+yylex (void)
+{
+  static int inner = 1;
+  static int outer = 0;
+  if (outer > MAX)
+    return 0;
+  else if (inner > outer)
+    {
+      inner = 1;
+      ++outer;
+      return END;
+    }
+  return inner++;
+}
+#include <stdlib.h> /* getenv. */
+#include <string.h> /* strcmp. */
+int
+main (int argc, char const* argv[])
+{
+  (void) argc;
+  (void) argv;
+  return yyparse ();
+}
+./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
+./calc.at:1492: cat stderr
+input:
+  | (#) + (#) = 2222
+./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
-memory exhausted
-memory exhausted
-./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./torture.at:510: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./torture.at:237: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1494: "$PERL" -ne '
   chomp;
   print "$ARGV:$.: {$_}\n"
@@ -229609,7 +225539,9 @@
         || /\t/
         )' calc.cc calc.hh
 
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -229624,7 +225556,121 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1494:  $PREPARSER ./calc  input
+606. torture.at:270: testing State number type: 128 states ...
+./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr	2024-06-20 12:39:39.215278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found
+606. torture.at:270:  skipped (torture.at:270)
 stderr:
+./calc.at:1492: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -230462,7 +226508,9 @@
 Cleanup: popping token "end of input" (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1492: cat stderr
 stderr:
+
 Starting parse
 Entering state 0
 Reading a token
@@ -231186,1048 +227234,207 @@
    $2 = token '=' (12.7: )
    $3 = nterm exp (12.9-11: 256)
 -> $$ = nterm exp (12.1-11: 256)
-Entering state 8
-Next token is token '\n' (12.12-13.0: )
-Shifting token '\n' (12.12-13.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (12.1-11: 256)
-   $2 = token '\n' (12.12-13.0: )
--> $$ = nterm line (12.1-13.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-12.0: )
-   $2 = nterm line (12.1-13.0: )
--> $$ = nterm input (1.1-13.0: )
-Entering state 6
-Reading a token
-Next token is token '(' (13.1: )
-Shifting token '(' (13.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (13.2: 2)
-Shifting token "number" (13.2: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (13.2: 2)
--> $$ = nterm exp (13.2: 2)
-Entering state 12
-Reading a token
-Next token is token '^' (13.3: )
-Shifting token '^' (13.3: )
-Entering state 23
-Reading a token
-Next token is token "number" (13.4: 2)
-Shifting token "number" (13.4: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (13.4: 2)
--> $$ = nterm exp (13.4: 2)
-Entering state 32
-Reading a token
-Next token is token ')' (13.5: )
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (13.2: 2)
-   $2 = token '^' (13.3: )
-   $3 = nterm exp (13.4: 2)
--> $$ = nterm exp (13.2-4: 4)
-Entering state 12
-Next token is token ')' (13.5: )
-Shifting token ')' (13.5: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (13.1: )
-   $2 = nterm exp (13.2-4: 4)
-   $3 = token ')' (13.5: )
--> $$ = nterm exp (13.1-5: 4)
-Entering state 8
-Reading a token
-Next token is token '^' (13.6: )
-Shifting token '^' (13.6: )
-Entering state 23
-Reading a token
-Next token is token "number" (13.7: 3)
-Shifting token "number" (13.7: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (13.7: 3)
--> $$ = nterm exp (13.7: 3)
-Entering state 32
-Reading a token
-Next token is token '=' (13.9: )
-Reducing stack 0 by rule 12 (line 103):
-   $1 = nterm exp (13.1-5: 4)
-   $2 = token '^' (13.6: )
-   $3 = nterm exp (13.7: 3)
--> $$ = nterm exp (13.1-7: 64)
-Entering state 8
-Next token is token '=' (13.9: )
-Shifting token '=' (13.9: )
-Entering state 18
-Reading a token
-Next token is token "number" (13.11-12: 64)
-Shifting token "number" (13.11-12: 64)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (13.11-12: 64)
--> $$ = nterm exp (13.11-12: 64)
-Entering state 27
-Reading a token
-Next token is token '\n' (13.13-14.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (13.1-7: 64)
-   $2 = token '=' (13.9: )
-   $3 = nterm exp (13.11-12: 64)
--> $$ = nterm exp (13.1-12: 64)
-Entering state 8
-Next token is token '\n' (13.13-14.0: )
-Shifting token '\n' (13.13-14.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (13.1-12: 64)
-   $2 = token '\n' (13.13-14.0: )
--> $$ = nterm line (13.1-14.0: )
-Entering state 17
-Reducing stack 0 by rule 2 (line 70):
-   $1 = nterm input (1.1-13.0: )
-   $2 = nterm line (13.1-14.0: )
--> $$ = nterm input (1.1-14.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (14.1: )
-Entering state 16
-Cleanup: popping token "end of input" (14.1: )
-Cleanup: popping nterm input (1.1-14.0: )
-input:
-  | 1 2
-./calc.at:1494:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token "number" (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token "number" (1.3: 2)
-./torture.at:140: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1494: cat stderr
-input:
-  | 1//2
-./calc.at:1494:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 22
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '/' (1.2: )
-Shifting token '/' (1.2: )
-Entering state 22
-Reading a token
-Next token is token '/' (1.3: )
-1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
-Error: popping token '/' (1.2: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '/' (1.3: )
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1494: cat stderr
-input:
-  | error
-./calc.at:1494:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "invalid token" (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token "invalid token" (1.1: )
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-./calc.at:1494: cat stderr
-stdout:
-./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20
-stderr:
-./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | 1 = 2 = 3
-./calc.at:1494:  $PREPARSER ./calc  input
-stderr:
-stderr:
-./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 27
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stderr:
-./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '=' (1.3: )
-Shifting token '=' (1.3: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 27
-Reading a token
-Next token is token '=' (1.7: )
-1.7: syntax error, unexpected '='
-Error: popping nterm exp (1.5: 2)
-Error: popping token '=' (1.3: )
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token '=' (1.7: )
-./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-memory exhausted
-memory exhausted
-./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1494: cat stderr
-stderr:
-memory exhausted
-memory exhausted
-614. torture.at:485:  ok
-input:
-  | 
-  | +1
-./calc.at:1494:  $PREPARSER ./calc  input
-
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '\n' (1.1-2.0: )
-Shifting token '\n' (1.1-2.0: )
-Entering state 3
-Reducing stack 0 by rule 3 (line 74):
-   $1 = token '\n' (1.1-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Next token is token '+' (2.1: )
-2.1: syntax error, unexpected '+'
-Error: popping nterm input (1.1-2.0: )
-Cleanup: discarding lookahead token '+' (2.1: )
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1494: cat stderr
-./calc.at:1494:  $PREPARSER ./calc  /dev/null
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Now at end of input.
-1.1: syntax error, unexpected end of input
-Cleanup: discarding lookahead token "end of input" (1.1: )
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-615. torture.at:531: testing Exploding the Stack Size with Malloc ...
-./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./calc.at:1494: cat stderr
-input:
-  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
-./calc.at:1494:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 29
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 20
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
-Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
-Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
-Entering state 12
-Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 30
-Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
-Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 21
-Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
-Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
-Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.47-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
-Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token ')' (1.2: )
-1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token ')' (1.2: )
-Shifting token ')' (1.2: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.2: )
--> $$ = nterm exp (1.1-2: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.6: )
-Shifting token '(' (1.6: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.7: 1)
-Shifting token "number" (1.7: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.7: 1)
--> $$ = nterm exp (1.7: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.9: )
-Shifting token '+' (1.9: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.11: 1)
-Shifting token "number" (1.11: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11: 1)
--> $$ = nterm exp (1.11: 1)
-Entering state 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.7: 1)
-   $2 = token '+' (1.9: )
-   $3 = nterm exp (1.11: 1)
--> $$ = nterm exp (1.7-11: 2)
-Entering state 12
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Reading a token
-Next token is token "number" (1.15: 1)
-Shifting token "number" (1.15: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15: 1)
--> $$ = nterm exp (1.15: 1)
-Entering state 29
-Reading a token
-Next token is token '+' (1.17: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.7-11: 2)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15: 1)
--> $$ = nterm exp (1.7-15: 3)
-Entering state 12
-Next token is token '+' (1.17: )
-Shifting token '+' (1.17: )
-Entering state 20
-Reading a token
-Next token is token ')' (1.18: )
-1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
-Error: popping token '+' (1.17: )
-Error: popping nterm exp (1.7-15: 3)
-Shifting token error (1.7-18: )
-Entering state 11
-Next token is token ')' (1.18: )
-Shifting token ')' (1.18: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.6: )
-   $2 = token error (1.7-18: )
-   $3 = token ')' (1.18: )
--> $$ = nterm exp (1.6-18: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.20: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-2: 1111)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6-18: 1111)
--> $$ = nterm exp (1.1-18: 2222)
-Entering state 8
-Next token is token '+' (1.20: )
-Shifting token '+' (1.20: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.22: )
-Shifting token '(' (1.22: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.23: )
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.23: )
-Entering state 11
-Next token is token '*' (1.23: )
-Error: discarding token '*' (1.23: )
-Reading a token
-Next token is token '*' (1.25: )
-Error: discarding token '*' (1.25: )
-Reading a token
-Next token is token '*' (1.27: )
-Error: discarding token '*' (1.27: )
-Reading a token
-Next token is token ')' (1.28: )
-Entering state 11
-Next token is token ')' (1.28: )
-Shifting token ')' (1.28: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.22: )
-   $2 = token error (1.23-27: )
-   $3 = token ')' (1.28: )
--> $$ = nterm exp (1.22-28: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.30: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-18: 2222)
-   $2 = token '+' (1.20: )
-   $3 = nterm exp (1.22-28: 1111)
--> $$ = nterm exp (1.1-28: 3333)
-Entering state 8
-Next token is token '+' (1.30: )
-Shifting token '+' (1.30: )
-Entering state 20
+Entering state 8
+Next token is token '\n' (12.12-13.0: )
+Shifting token '\n' (12.12-13.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (12.1-11: 256)
+   $2 = token '\n' (12.12-13.0: )
+-> $$ = nterm line (12.1-13.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-12.0: )
+   $2 = nterm line (12.1-13.0: )
+-> $$ = nterm input (1.1-13.0: )
+Entering state 6
 Reading a token
-Next token is token '(' (1.32: )
-Shifting token '(' (1.32: )
+Next token is token '(' (13.1: )
+Shifting token '(' (13.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.33: 1)
-Shifting token "number" (1.33: 1)
+Next token is token "number" (13.2: 2)
+Shifting token "number" (13.2: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.33: 1)
--> $$ = nterm exp (1.33: 1)
+   $1 = token "number" (13.2: 2)
+-> $$ = nterm exp (13.2: 2)
 Entering state 12
 Reading a token
-Next token is token '*' (1.35: )
-Shifting token '*' (1.35: )
-Entering state 21
+Next token is token '^' (13.3: )
+Shifting token '^' (13.3: )
+Entering state 23
 Reading a token
-Next token is token "number" (1.37: 2)
-Shifting token "number" (1.37: 2)
+Next token is token "number" (13.4: 2)
+Shifting token "number" (13.4: 2)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.37: 2)
--> $$ = nterm exp (1.37: 2)
-Entering state 30
+   $1 = token "number" (13.4: 2)
+-> $$ = nterm exp (13.4: 2)
+Entering state 32
 Reading a token
-Next token is token '*' (1.39: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.33: 1)
-   $2 = token '*' (1.35: )
-   $3 = nterm exp (1.37: 2)
--> $$ = nterm exp (1.33-37: 2)
+Next token is token ')' (13.5: )
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (13.2: 2)
+   $2 = token '^' (13.3: )
+   $3 = nterm exp (13.4: 2)
+-> $$ = nterm exp (13.2-4: 4)
 Entering state 12
-Next token is token '*' (1.39: )
-Shifting token '*' (1.39: )
-Entering state 21
+Next token is token ')' (13.5: )
+Shifting token ')' (13.5: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (13.1: )
+   $2 = nterm exp (13.2-4: 4)
+   $3 = token ')' (13.5: )
+-> $$ = nterm exp (13.1-5: 4)
+Entering state 8
 Reading a token
-Next token is token '*' (1.41: )
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Error: popping token '*' (1.39: )
-Error: popping nterm exp (1.33-37: 2)
-Shifting token error (1.33-41: )
-Entering state 11
-Next token is token '*' (1.41: )
-Error: discarding token '*' (1.41: )
+Next token is token '^' (13.6: )
+Shifting token '^' (13.6: )
+Entering state 23
 Reading a token
-Next token is token ')' (1.42: )
-Entering state 11
-Next token is token ')' (1.42: )
-Shifting token ')' (1.42: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.32: )
-   $2 = token error (1.33-41: )
-   $3 = token ')' (1.42: )
--> $$ = nterm exp (1.32-42: 1111)
-Entering state 29
+Next token is token "number" (13.7: 3)
+Shifting token "number" (13.7: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (13.7: 3)
+-> $$ = nterm exp (13.7: 3)
+Entering state 32
 Reading a token
-Next token is token '=' (1.44: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-28: 3333)
-   $2 = token '+' (1.30: )
-   $3 = nterm exp (1.32-42: 1111)
--> $$ = nterm exp (1.1-42: 4444)
+Next token is token '=' (13.9: )
+Reducing stack 0 by rule 12 (line 103):
+   $1 = nterm exp (13.1-5: 4)
+   $2 = token '^' (13.6: )
+   $3 = nterm exp (13.7: 3)
+-> $$ = nterm exp (13.1-7: 64)
 Entering state 8
-Next token is token '=' (1.44: )
-Shifting token '=' (1.44: )
+Next token is token '=' (13.9: )
+Shifting token '=' (13.9: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.46: 1)
-Shifting token "number" (1.46: 1)
+Next token is token "number" (13.11-12: 64)
+Shifting token "number" (13.11-12: 64)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.46: 1)
--> $$ = nterm exp (1.46: 1)
+   $1 = token "number" (13.11-12: 64)
+-> $$ = nterm exp (13.11-12: 64)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.47-2.0: )
+Next token is token '\n' (13.13-14.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-42: 4444)
-   $2 = token '=' (1.44: )
-   $3 = nterm exp (1.46: 1)
-1.1-46: error: 4444 != 1
--> $$ = nterm exp (1.1-46: 4444)
+   $1 = nterm exp (13.1-7: 64)
+   $2 = token '=' (13.9: )
+   $3 = nterm exp (13.11-12: 64)
+-> $$ = nterm exp (13.1-12: 64)
 Entering state 8
-Next token is token '\n' (1.47-2.0: )
-Shifting token '\n' (1.47-2.0: )
+Next token is token '\n' (13.13-14.0: )
+Shifting token '\n' (13.13-14.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-46: 4444)
-   $2 = token '\n' (1.47-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
+   $1 = nterm exp (13.1-12: 64)
+   $2 = token '\n' (13.13-14.0: )
+-> $$ = nterm line (13.1-14.0: )
+Entering state 17
+Reducing stack 0 by rule 2 (line 70):
+   $1 = nterm input (1.1-13.0: )
+   $2 = nterm line (13.1-14.0: )
+-> $$ = nterm input (1.1-14.0: )
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
+Shifting token "end of input" (14.1: )
 Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./torture.at:535: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1494: cat stderr
+Cleanup: popping token "end of input" (14.1: )
+Cleanup: popping nterm input (1.1-14.0: )
 input:
-  | (!!) + (1 2) = 1
+input:
+  | (1 + #) = 1111
+./calc.at:1492:  $PREPARSER ./calc  input
+  | 1 2
 ./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 20
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
+Starting parse
+Entering state 0
 Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
 Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
-Next token is token ')' (1.12: )
+Next token is token ')' (1.7: )
 Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -232241,6 +227448,8 @@
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -232249,98 +227458,64 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '!' (1.2: )
-Shifting token '!' (1.2: )
-Entering state 5
-Reading a token
-Next token is token '!' (1.3: )
-Shifting token '!' (1.3: )
-Entering state 15
-Reducing stack 0 by rule 16 (line 107):
-   $1 = token '!' (1.2: )
-   $2 = token '!' (1.3: )
-Shifting token error (1.2-3: )
-Entering state 11
-Reading a token
-Next token is token ')' (1.4: )
-Shifting token ')' (1.4: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-3: )
-   $3 = token ')' (1.4: )
--> $$ = nterm exp (1.1-4: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.6: )
-Shifting token '+' (1.6: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.8: )
-Shifting token '(' (1.8: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.9: 1)
-Shifting token "number" (1.9: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 1)
--> $$ = nterm exp (1.9: 1)
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
-Next token is token "number" (1.11: 2)
-1.11: syntax error, unexpected number
-Error: popping nterm exp (1.9: 1)
-Shifting token error (1.9-11: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
 Entering state 11
-Next token is token "number" (1.11: 2)
-Error: discarding token "number" (1.11: 2)
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
-Next token is token ')' (1.12: )
+Next token is token ')' (1.7: )
 Entering state 11
-Next token is token ')' (1.12: )
-Shifting token ')' (1.12: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.8: )
-   $2 = token error (1.9-11: )
-   $3 = token ')' (1.12: )
--> $$ = nterm exp (1.8-12: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.14: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-4: 1111)
-   $2 = token '+' (1.6: )
-   $3 = nterm exp (1.8-12: 1111)
--> $$ = nterm exp (1.1-12: 2222)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
-Next token is token '=' (1.14: )
-Shifting token '=' (1.14: )
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-12: 2222)
-   $2 = token '=' (1.14: )
-   $3 = nterm exp (1.16: 1)
-1.1-16: error: 2222 != 1
--> $$ = nterm exp (1.1-16: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -232353,6 +227528,21 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token "number" (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token "number" (1.3: 2)
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -232363,11 +227553,55 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1492: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1494: cat stderr
+./calc.at:1492: cat stderr
+607. torture.at:271: testing State number type: 129 states ...
+./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77
 input:
-  | (- *) + (1 2) = 1
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr	2024-06-20 12:39:39.587278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found
+607. torture.at:271:   | 1//2
 ./calc.at:1494:  $PREPARSER ./calc  input
+ skipped (torture.at:271)
+input:
+  | (# + 1) = 1111
+./calc.at:1492:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 22
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
 stderr:
+
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -232375,103 +227609,56 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
 Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
 Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
 Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
 Reading a token
-Next token is token ')' (1.13: )
+Next token is token ')' (1.7: )
 Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -232484,7 +227671,28 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '/' (1.2: )
+Shifting token '/' (1.2: )
+Entering state 22
+Reading a token
+Next token is token '/' (1.3: )
+1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!'
+Error: popping token '/' (1.2: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '/' (1.3: )
 stderr:
 Starting parse
 Entering state 0
@@ -232493,103 +227701,56 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
 Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
 Reading a token
-Next token is token "number" (1.10: 1)
-Shifting token "number" (1.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
 Reading a token
-Next token is token "number" (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token "number" (1.12: 2)
-Error: discarding token "number" (1.12: 2)
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
 Reading a token
-Next token is token ')' (1.13: )
+Next token is token ')' (1.7: )
 Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
 Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.17: 1)
-Shifting token "number" (1.17: 1)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.17: 1)
--> $$ = nterm exp (1.17: 1)
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -232612,54 +227773,79 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1492: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1494: cat stderr
+./calc.at:1492: cat stderr
 input:
-  | (* *) + (*) + (*)
+  | error
 ./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
 Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+input:
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+608. torture.at:272: testing State number type: 256 states ...
+./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77
+  | (1 + # + 1) = 1111
+./calc.at:1492:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "invalid token" (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token "invalid token" (1.1: )
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr	2024-06-20 12:39:39.999278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found
+stderr:
+608. torture.at:272: Starting parse
+Entering state 0
+Reading a token
 Next token is token '(' (1.1: )
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
 Reading a token
-Next token is token ')' (1.5: )
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
 Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
 Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
 Reading a token
 Next token is token ')' (1.11: )
 Entering state 11
@@ -232667,59 +227853,37 @@
 Shifting token ')' (1.11: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
    $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
 Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 29
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -232732,7 +227896,18 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+ skipped (torture.at:272)
+./calc.at:1494: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
 Starting parse
 Entering state 0
@@ -232741,42 +227916,32 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
 Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
 Reading a token
-Next token is token ')' (1.5: )
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
 Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
 Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
 Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
 Reading a token
 Next token is token ')' (1.11: )
 Entering state 11
@@ -232784,59 +227949,37 @@
 Shifting token ')' (1.11: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
    $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
+-> $$ = nterm exp (1.1-11: 1111)
 Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
 Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
 Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 29
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
 Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
 Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -232849,7 +227992,9 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1494: "$PERL" -pi -e 'use strict;
+
+./calc.at:1494: cat stderr
+./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -232859,10 +228004,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1494: cat stderr
 input:
-  | 1 + 2 * 3 + !+ ++
+  | 1 = 2 = 3
 ./calc.at:1494:  $PREPARSER ./calc  input
+./calc.at:1492: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -232875,9 +228020,9 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 18
 Reading a token
 Next token is token "number" (1.5: 2)
 Shifting token "number" (1.5: 2)
@@ -232885,52 +228030,22 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Entering state 27
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
+input:
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (1 + 1) / (1 - 1)
+./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
+stderr:
+609. torture.at:273: testing State number type: 257 states ...
+./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -232942,9 +228057,9 @@
 -> $$ = nterm exp (1.1: 1)
 Entering state 8
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
+Next token is token '=' (1.3: )
+Shifting token '=' (1.3: )
+Entering state 18
 Reading a token
 Next token is token "number" (1.5: 2)
 Shifting token "number" (1.5: 2)
@@ -232952,186 +228067,135 @@
 Reducing stack 0 by rule 5 (line 79):
    $1 = token "number" (1.5: 2)
 -> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Entering state 27
 Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1494:  $PREPARSER ./calc  input
-stderr:
+Next token is token '=' (1.7: )
+1.7: syntax error, unexpected '='
+Error: popping nterm exp (1.5: 2)
+Error: popping token '=' (1.3: )
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token '=' (1.7: )
 Starting parse
 Entering state 0
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
 Entering state 20
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
 Entering state 29
 Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
+Next token is token ')' (1.7: )
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
 Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
 Reading a token
-Next token is token "number" (1.1: 1)
-Shifting token "number" (1.1: 1)
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
 Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
 Reading a token
-Next token is token "number" (1.5: 2)
-Shifting token "number" (1.5: 2)
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
 Reading a token
-Next token is token "number" (1.9: 3)
-Shifting token "number" (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
 Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr	2024-06-20 12:39:40.299278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found
+609. torture.at:273:  skipped (torture.at:273)
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -233142,11 +228206,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1494: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
+
 Starting parse
 Entering state 0
 Reading a token
@@ -233154,84 +228215,102 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
 Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
 Entering state 8
 Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
 Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
 Entering state 4
 Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
 Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
 Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
 Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
 Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -233244,93 +228323,59 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1494: cat stderr
+./calc.at:1492: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+input:
+  | 
+  | +1
+./calc.at:1494:  $PREPARSER ./calc  input
+./calc.at:1492: cat stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' (1.1-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
 Reading a token
-Next token is token "number" (1.13-16: 2222)
-Shifting token "number" (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+567. calc.at:1492:  ok
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+610. torture.at:274: testing State number type: 32768 states ...
+./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr	2024-06-20 12:39:40.559278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found
+610. torture.at:274: Starting parse
+Entering state 0
 Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
+Next token is token '\n' (1.1-2.0: )
+Shifting token '\n' (1.1-2.0: )
+Entering state 3
+Reducing stack 0 by rule 3 (line 74):
+   $1 = token '\n' (1.1-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -233338,13 +228383,54 @@
 -> $$ = nterm input (1.1-2.0: )
 Entering state 6
 Reading a token
+Next token is token '+' (2.1: )
+2.1: syntax error, unexpected '+'
+Error: popping nterm input (1.1-2.0: )
+Cleanup: discarding lookahead token '+' (2.1: )
+ skipped (torture.at:274)
+./calc.at:1494: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+
+./calc.at:1494: cat stderr
+
+./calc.at:1494:  $PREPARSER ./calc  /dev/null
+stderr:
+Starting parse
+Entering state 0
+Reading a token
 Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./calc.at:1494: "$PERL" -pi -e 'use strict;
+612. torture.at:276: testing State number type: 65537 states ...
+./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Now at end of input.
+1.1: syntax error, unexpected end of input
+Cleanup: discarding lookahead token "end of input" (1.1: )
+611. torture.at:275: testing State number type: 65536 states ...
+./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr	2024-06-20 12:39:40.803278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found
+612. torture.at:276:  skipped (torture.at:276)
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr	2024-06-20 12:39:40.823278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found
+611. torture.at:275: ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -233354,6 +228440,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+ skipped (torture.at:275)
 stdout:
 ./calc.at:1491: "$PERL" -ne '
   chomp;
@@ -233366,11 +228454,11 @@
         || /\t/
         )' calc.cc calc.hh
 
+
 ./calc.at:1494: cat stderr
 input:
-  | (1 + #) = 1111
+
 input:
-./calc.at:1494:  $PREPARSER ./calc  input
   | 1 + 2 * 3 = 7
   | 1 + 2 * -3 = -5
   | 
@@ -233385,163 +228473,9 @@
   | 2^2^3 = 256
   | (2^2)^3 = 64
 ./calc.at:1491:  $PREPARSER ./calc  input
+  | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 Starting parse
 Entering state 0
@@ -234379,19 +229313,504 @@
 Entering state 16
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 29
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 20
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 21
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./calc.at:1494: cat stderr
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token ')' (1.2: )
+1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token ')' (1.2: )
+Shifting token ')' (1.2: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.2: )
+-> $$ = nterm exp (1.1-2: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.6: )
+Shifting token '(' (1.6: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.7: 1)
+Shifting token "number" (1.7: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.7: 1)
+-> $$ = nterm exp (1.7: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.9: )
+Shifting token '+' (1.9: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.11: 1)
+Shifting token "number" (1.11: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11: 1)
+-> $$ = nterm exp (1.11: 1)
+Entering state 29
+Reading a token
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.7: 1)
+   $2 = token '+' (1.9: )
+   $3 = nterm exp (1.11: 1)
+-> $$ = nterm exp (1.7-11: 2)
+Entering state 12
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.15: 1)
+Shifting token "number" (1.15: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.15: 1)
+-> $$ = nterm exp (1.15: 1)
+Entering state 29
+Reading a token
+Next token is token '+' (1.17: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.7-11: 2)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15: 1)
+-> $$ = nterm exp (1.7-15: 3)
+Entering state 12
+Next token is token '+' (1.17: )
+Shifting token '+' (1.17: )
+Entering state 20
+Reading a token
+Next token is token ')' (1.18: )
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
+Error: popping token '+' (1.17: )
+Error: popping nterm exp (1.7-15: 3)
+Shifting token error (1.7-18: )
+Entering state 11
+Next token is token ')' (1.18: )
+Shifting token ')' (1.18: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.6: )
+   $2 = token error (1.7-18: )
+   $3 = token ')' (1.18: )
+-> $$ = nterm exp (1.6-18: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.20: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-2: 1111)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6-18: 1111)
+-> $$ = nterm exp (1.1-18: 2222)
+Entering state 8
+Next token is token '+' (1.20: )
+Shifting token '+' (1.20: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.22: )
+Shifting token '(' (1.22: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.23: )
+1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.23: )
+Entering state 11
+Next token is token '*' (1.23: )
+Error: discarding token '*' (1.23: )
+Reading a token
+Next token is token '*' (1.25: )
+Error: discarding token '*' (1.25: )
+Reading a token
+Next token is token '*' (1.27: )
+Error: discarding token '*' (1.27: )
+Reading a token
+Next token is token ')' (1.28: )
+Entering state 11
+Next token is token ')' (1.28: )
+Shifting token ')' (1.28: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.22: )
+   $2 = token error (1.23-27: )
+   $3 = token ')' (1.28: )
+-> $$ = nterm exp (1.22-28: 1111)
+Entering state 29
+Reading a token
+Next token is token '+' (1.30: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-18: 2222)
+   $2 = token '+' (1.20: )
+   $3 = nterm exp (1.22-28: 1111)
+-> $$ = nterm exp (1.1-28: 3333)
+Entering state 8
+Next token is token '+' (1.30: )
+Shifting token '+' (1.30: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.32: )
+Shifting token '(' (1.32: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.33: 1)
+Shifting token "number" (1.33: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.33: 1)
+-> $$ = nterm exp (1.33: 1)
+Entering state 12
+Reading a token
+Next token is token '*' (1.35: )
+Shifting token '*' (1.35: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.37: 2)
+Shifting token "number" (1.37: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.37: 2)
+-> $$ = nterm exp (1.37: 2)
+Entering state 30
+Reading a token
+Next token is token '*' (1.39: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.33: 1)
+   $2 = token '*' (1.35: )
+   $3 = nterm exp (1.37: 2)
+-> $$ = nterm exp (1.33-37: 2)
+Entering state 12
+Next token is token '*' (1.39: )
+Shifting token '*' (1.39: )
+Entering state 21
+Reading a token
+Next token is token '*' (1.41: )
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Error: popping token '*' (1.39: )
+Error: popping nterm exp (1.33-37: 2)
+Shifting token error (1.33-41: )
+Entering state 11
+Next token is token '*' (1.41: )
+Error: discarding token '*' (1.41: )
+Reading a token
+Next token is token ')' (1.42: )
+Entering state 11
+Next token is token ')' (1.42: )
+Shifting token ')' (1.42: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.32: )
+   $2 = token error (1.33-41: )
+   $3 = token ')' (1.42: )
+-> $$ = nterm exp (1.32-42: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.44: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-28: 3333)
+   $2 = token '+' (1.30: )
+   $3 = nterm exp (1.32-42: 1111)
+-> $$ = nterm exp (1.1-42: 4444)
+Entering state 8
+Next token is token '=' (1.44: )
+Shifting token '=' (1.44: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.46: 1)
+Shifting token "number" (1.46: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.46: 1)
+-> $$ = nterm exp (1.46: 1)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.47-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-42: 4444)
+   $2 = token '=' (1.44: )
+   $3 = nterm exp (1.46: 1)
+1.1-46: error: 4444 != 1
+-> $$ = nterm exp (1.1-46: 4444)
+Entering state 8
+Next token is token '\n' (1.47-2.0: )
+Shifting token '\n' (1.47-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-46: 4444)
+   $2 = token '\n' (1.47-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 Starting parse
 Entering state 0
 Reading a token
@@ -235229,12 +230648,3312 @@
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 input:
+./calc.at:1494: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 1 2
 ./calc.at:1491:  $PREPARSER ./calc  input
-input:
-  | (# + 1) = 1111
+613. torture.at:385: testing Many lookahead tokens ...
+./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77
+614. torture.at:485: testing Exploding the Stack Size with Alloca ...
+./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
+./calc.at:1494: cat stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token number (1.3: 2)
+1.3: syntax error, unexpected number
+Error: popping nterm exp (1.1: 1)
+Cleanup: discarding lookahead token number (1.3: 2)
+input:
+  | (!!) + (1 2) = 1
 ./calc.at:1494:  $PREPARSER ./calc  input
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
+Reading a token
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
+Entering state 11
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
+Reading a token
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
+Entering state 11
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
+Reading a token
+Next token is token ')' (1.12: )
+Entering state 11
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.14: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+%define parse.error verbose
+%debug
+%{
+/* Adjust to the compiler.
+  We used to do it here, but each time we add a new line,
+  we have to adjust all the line numbers in error messages.
+  It's simpler to use a constant include to a varying file.  */
+#include <testsuite.h>
+
+# include <stdio.h>
+# include <stdlib.h>
+# include <assert.h>
+# define MAX 1000
+static int yylex (void);
+#include <stdio.h>
+
+/* !POSIX */ static void yyerror (const char *msg);
+%}
+%union
+{
+  int val;
+};
+
+%type <val> input exp
+%token token
+%type <val> n1 n2 n3 n4 n5 n6 n7 n8 n9 n10 n11 n12 n13 n14 n15 n16 n17 n18
+	    n19 n20 n21 n22 n23 n24 n25 n26 n27 n28 n29 n30 n31 n32 n33 n34
+	    n35 n36 n37 n38 n39 n40 n41 n42 n43 n44 n45 n46 n47 n48 n49 n50
+	    n51 n52 n53 n54 n55 n56 n57 n58 n59 n60 n61 n62 n63 n64 n65 n66
+	    n67 n68 n69 n70 n71 n72 n73 n74 n75 n76 n77 n78 n79 n80 n81 n82
+	    n83 n84 n85 n86 n87 n88 n89 n90 n91 n92 n93 n94 n95 n96 n97 n98
+	    n99 n100 n101 n102 n103 n104 n105 n106 n107 n108 n109 n110 n111
+	    n112 n113 n114 n115 n116 n117 n118 n119 n120 n121 n122 n123
+	    n124 n125 n126 n127 n128 n129 n130 n131 n132 n133 n134 n135
+	    n136 n137 n138 n139 n140 n141 n142 n143 n144 n145 n146 n147
+	    n148 n149 n150 n151 n152 n153 n154 n155 n156 n157 n158 n159
+	    n160 n161 n162 n163 n164 n165 n166 n167 n168 n169 n170 n171
+	    n172 n173 n174 n175 n176 n177 n178 n179 n180 n181 n182 n183
+	    n184 n185 n186 n187 n188 n189 n190 n191 n192 n193 n194 n195
+	    n196 n197 n198 n199 n200 n201 n202 n203 n204 n205 n206 n207
+	    n208 n209 n210 n211 n212 n213 n214 n215 n216 n217 n218 n219
+	    n220 n221 n222 n223 n224 n225 n226 n227 n228 n229 n230 n231
+	    n232 n233 n234 n235 n236 n237 n238 n239 n240 n241 n242 n243
+	    n244 n245 n246 n247 n248 n249 n250 n251 n252 n253 n254 n255
+	    n256 n257 n258 n259 n260 n261 n262 n263 n264 n265 n266 n267
+	    n268 n269 n270 n271 n272 n273 n274 n275 n276 n277 n278 n279
+	    n280 n281 n282 n283 n284 n285 n286 n287 n288 n289 n290 n291
+	    n292 n293 n294 n295 n296 n297 n298 n299 n300 n301 n302 n303
+	    n304 n305 n306 n307 n308 n309 n310 n311 n312 n313 n314 n315
+	    n316 n317 n318 n319 n320 n321 n322 n323 n324 n325 n326 n327
+	    n328 n329 n330 n331 n332 n333 n334 n335 n336 n337 n338 n339
+	    n340 n341 n342 n343 n344 n345 n346 n347 n348 n349 n350 n351
+	    n352 n353 n354 n355 n356 n357 n358 n359 n360 n361 n362 n363
+	    n364 n365 n366 n367 n368 n369 n370 n371 n372 n373 n374 n375
+	    n376 n377 n378 n379 n380 n381 n382 n383 n384 n385 n386 n387
+	    n388 n389 n390 n391 n392 n393 n394 n395 n396 n397 n398 n399
+	    n400 n401 n402 n403 n404 n405 n406 n407 n408 n409 n410 n411
+	    n412 n413 n414 n415 n416 n417 n418 n419 n420 n421 n422 n423
+	    n424 n425 n426 n427 n428 n429 n430 n431 n432 n433 n434 n435
+	    n436 n437 n438 n439 n440 n441 n442 n443 n444 n445 n446 n447
+	    n448 n449 n450 n451 n452 n453 n454 n455 n456 n457 n458 n459
+	    n460 n461 n462 n463 n464 n465 n466 n467 n468 n469 n470 n471
+	    n472 n473 n474 n475 n476 n477 n478 n479 n480 n481 n482 n483
+	    n484 n485 n486 n487 n488 n489 n490 n491 n492 n493 n494 n495
+	    n496 n497 n498 n499 n500 n501 n502 n503 n504 n505 n506 n507
+	    n508 n509 n510 n511 n512 n513 n514 n515 n516 n517 n518 n519
+	    n520 n521 n522 n523 n524 n525 n526 n527 n528 n529 n530 n531
+	    n532 n533 n534 n535 n536 n537 n538 n539 n540 n541 n542 n543
+	    n544 n545 n546 n547 n548 n549 n550 n551 n552 n553 n554 n555
+	    n556 n557 n558 n559 n560 n561 n562 n563 n564 n565 n566 n567
+	    n568 n569 n570 n571 n572 n573 n574 n575 n576 n577 n578 n579
+	    n580 n581 n582 n583 n584 n585 n586 n587 n588 n589 n590 n591
+	    n592 n593 n594 n595 n596 n597 n598 n599 n600 n601 n602 n603
+	    n604 n605 n606 n607 n608 n609 n610 n611 n612 n613 n614 n615
+	    n616 n617 n618 n619 n620 n621 n622 n623 n624 n625 n626 n627
+	    n628 n629 n630 n631 n632 n633 n634 n635 n636 n637 n638 n639
+	    n640 n641 n642 n643 n644 n645 n646 n647 n648 n649 n650 n651
+	    n652 n653 n654 n655 n656 n657 n658 n659 n660 n661 n662 n663
+	    n664 n665 n666 n667 n668 n669 n670 n671 n672 n673 n674 n675
+	    n676 n677 n678 n679 n680 n681 n682 n683 n684 n685 n686 n687
+	    n688 n689 n690 n691 n692 n693 n694 n695 n696 n697 n698 n699
+	    n700 n701 n702 n703 n704 n705 n706 n707 n708 n709 n710 n711
+	    n712 n713 n714 n715 n716 n717 n718 n719 n720 n721 n722 n723
+	    n724 n725 n726 n727 n728 n729 n730 n731 n732 n733 n734 n735
+	    n736 n737 n738 n739 n740 n741 n742 n743 n744 n745 n746 n747
+	    n748 n749 n750 n751 n752 n753 n754 n755 n756 n757 n758 n759
+	    n760 n761 n762 n763 n764 n765 n766 n767 n768 n769 n770 n771
+	    n772 n773 n774 n775 n776 n777 n778 n779 n780 n781 n782 n783
+	    n784 n785 n786 n787 n788 n789 n790 n791 n792 n793 n794 n795
+	    n796 n797 n798 n799 n800 n801 n802 n803 n804 n805 n806 n807
+	    n808 n809 n810 n811 n812 n813 n814 n815 n816 n817 n818 n819
+	    n820 n821 n822 n823 n824 n825 n826 n827 n828 n829 n830 n831
+	    n832 n833 n834 n835 n836 n837 n838 n839 n840 n841 n842 n843
+	    n844 n845 n846 n847 n848 n849 n850 n851 n852 n853 n854 n855
+	    n856 n857 n858 n859 n860 n861 n862 n863 n864 n865 n866 n867
+	    n868 n869 n870 n871 n872 n873 n874 n875 n876 n877 n878 n879
+	    n880 n881 n882 n883 n884 n885 n886 n887 n888 n889 n890 n891
+	    n892 n893 n894 n895 n896 n897 n898 n899 n900 n901 n902 n903
+	    n904 n905 n906 n907 n908 n909 n910 n911 n912 n913 n914 n915
+	    n916 n917 n918 n919 n920 n921 n922 n923 n924 n925 n926 n927
+	    n928 n929 n930 n931 n932 n933 n934 n935 n936 n937 n938 n939
+	    n940 n941 n942 n943 n944 n945 n946 n947 n948 n949 n950 n951
+	    n952 n953 n954 n955 n956 n957 n958 n959 n960 n961 n962 n963
+	    n964 n965 n966 n967 n968 n969 n970 n971 n972 n973 n974 n975
+	    n976 n977 n978 n979 n980 n981 n982 n983 n984 n985 n986 n987
+	    n988 n989 n990 n991 n992 n993 n994 n995 n996 n997 n998 n999
+	    n1000
+%token
+    t1 1 "1"
+    t2 2 "2"
+    t3 3 "3"
+    t4 4 "4"
+    t5 5 "5"
+    t6 6 "6"
+    t7 7 "7"
+    t8 8 "8"
+    t9 9 "9"
+    t10 10 "10"
+    t11 11 "11"
+    t12 12 "12"
+    t13 13 "13"
+    t14 14 "14"
+    t15 15 "15"
+    t16 16 "16"
+    t17 17 "17"
+    t18 18 "18"
+    t19 19 "19"
+    t20 20 "20"
+    t21 21 "21"
+    t22 22 "22"
+    t23 23 "23"
+    t24 24 "24"
+    t25 25 "25"
+    t26 26 "26"
+    t27 27 "27"
+    t28 28 "28"
+    t29 29 "29"
+    t30 30 "30"
+    t31 31 "31"
+    t32 32 "32"
+    t33 33 "33"
+    t34 34 "34"
+    t35 35 "35"
+    t36 36 "36"
+    t37 37 "37"
+    t38 38 "38"
+    t39 39 "39"
+    t40 40 "40"
+    t41 41 "41"
+    t42 42 "42"
+    t43 43 "43"
+    t44 44 "44"
+    t45 45 "45"
+    t46 46 "46"
+    t47 47 "47"
+    t48 48 "48"
+    t49 49 "49"
+    t50 50 "50"
+    t51 51 "51"
+    t52 52 "52"
+    t53 53 "53"
+    t54 54 "54"
+    t55 55 "55"
+    t56 56 "56"
+    t57 57 "57"
+    t58 58 "58"
+    t59 59 "59"
+    t60 60 "60"
+    t61 61 "61"
+    t62 62 "62"
+    t63 63 "63"
+    t64 64 "64"
+    t65 65 "65"
+    t66 66 "66"
+    t67 67 "67"
+    t68 68 "68"
+    t69 69 "69"
+    t70 70 "70"
+    t71 71 "71"
+    t72 72 "72"
+    t73 73 "73"
+    t74 74 "74"
+    t75 75 "75"
+    t76 76 "76"
+    t77 77 "77"
+    t78 78 "78"
+    t79 79 "79"
+    t80 80 "80"
+    t81 81 "81"
+    t82 82 "82"
+    t83 83 "83"
+    t84 84 "84"
+    t85 85 "85"
+    t86 86 "86"
+    t87 87 "87"
+    t88 88 "88"
+    t89 89 "89"
+    t90 90 "90"
+    t91 91 "91"
+    t92 92 "92"
+    t93 93 "93"
+    t94 94 "94"
+    t95 95 "95"
+    t96 96 "96"
+    t97 97 "97"
+    t98 98 "98"
+    t99 99 "99"
+    t100 100 "100"
+    t101 101 "101"
+    t102 102 "102"
+    t103 103 "103"
+    t104 104 "104"
+    t105 105 "105"
+    t106 106 "106"
+    t107 107 "107"
+    t108 108 "108"
+    t109 109 "109"
+    t110 110 "110"
+    t111 111 "111"
+    t112 112 "112"
+    t113 113 "113"
+    t114 114 "114"
+    t115 115 "115"
+    t116 116 "116"
+    t117 117 "117"
+    t118 118 "118"
+    t119 119 "119"
+    t120 120 "120"
+    t121 121 "121"
+    t122 122 "122"
+    t123 123 "123"
+    t124 124 "124"
+    t125 125 "125"
+    t126 126 "126"
+    t127 127 "127"
+    t128 128 "128"
+    t129 129 "129"
+    t130 130 "130"
+    t131 131 "131"
+    t132 132 "132"
+    t133 133 "133"
+    t134 134 "134"
+    t135 135 "135"
+    t136 136 "136"
+    t137 137 "137"
+    t138 138 "138"
+    t139 139 "139"
+    t140 140 "140"
+    t141 141 "141"
+    t142 142 "142"
+    t143 143 "143"
+    t144 144 "144"
+    t145 145 "145"
+    t146 146 "146"
+    t147 147 "147"
+    t148 148 "148"
+    t149 149 "149"
+    t150 150 "150"
+    t151 151 "151"
+    t152 152 "152"
+    t153 153 "153"
+    t154 154 "154"
+    t155 155 "155"
+    t156 156 "156"
+    t157 157 "157"
+    t158 158 "158"
+    t159 159 "159"
+    t160 160 "160"
+    t161 161 "161"
+    t162 162 "162"
+    t163 163 "163"
+    t164 164 "164"
+    t165 165 "165"
+    t166 166 "166"
+    t167 167 "167"
+    t168 168 "168"
+    t169 169 "169"
+    t170 170 "170"
+    t171 171 "171"
+    t172 172 "172"
+    t173 173 "173"
+    t174 174 "174"
+    t175 175 "175"
+    t176 176 "176"
+    t177 177 "177"
+    t178 178 "178"
+    t179 179 "179"
+    t180 180 "180"
+    t181 181 "181"
+    t182 182 "182"
+    t183 183 "183"
+    t184 184 "184"
+    t185 185 "185"
+    t186 186 "186"
+    t187 187 "187"
+    t188 188 "188"
+    t189 189 "189"
+    t190 190 "190"
+    t191 191 "191"
+    t192 192 "192"
+    t193 193 "193"
+    t194 194 "194"
+    t195 195 "195"
+    t196 196 "196"
+    t197 197 "197"
+    t198 198 "198"
+    t199 199 "199"
+    t200 200 "200"
+    t201 201 "201"
+    t202 202 "202"
+    t203 203 "203"
+    t204 204 "204"
+    t205 205 "205"
+    t206 206 "206"
+    t207 207 "207"
+    t208 208 "208"
+    t209 209 "209"
+    t210 210 "210"
+    t211 211 "211"
+    t212 212 "212"
+    t213 213 "213"
+    t214 214 "214"
+    t215 215 "215"
+    t216 216 "216"
+    t217 217 "217"
+    t218 218 "218"
+    t219 219 "219"
+    t220 220 "220"
+    t221 221 "221"
+    t222 222 "222"
+    t223 223 "223"
+    t224 224 "224"
+    t225 225 "225"
+    t226 226 "226"
+    t227 227 "227"
+    t228 228 "228"
+    t229 229 "229"
+    t230 230 "230"
+    t231 231 "231"
+    t232 232 "232"
+    t233 233 "233"
+    t234 234 "234"
+    t235 235 "235"
+    t236 236 "236"
+    t237 237 "237"
+    t238 238 "238"
+    t239 239 "239"
+    t240 240 "240"
+    t241 241 "241"
+    t242 242 "242"
+    t243 243 "243"
+    t244 244 "244"
+    t245 245 "245"
+    t246 246 "246"
+    t247 247 "247"
+    t248 248 "248"
+    t249 249 "249"
+    t250 250 "250"
+    t251 251 "251"
+    t252 252 "252"
+    t253 253 "253"
+    t254 254 "254"
+    t255 255 "255"
+    t256 256 "256"
+    t257 257 "257"
+    t258 258 "258"
+    t259 259 "259"
+    t260 260 "260"
+    t261 261 "261"
+    t262 262 "262"
+    t263 263 "263"
+    t264 264 "264"
+    t265 265 "265"
+    t266 266 "266"
+    t267 267 "267"
+    t268 268 "268"
+    t269 269 "269"
+    t270 270 "270"
+    t271 271 "271"
+    t272 272 "272"
+    t273 273 "273"
+    t274 274 "274"
+    t275 275 "275"
+    t276 276 "276"
+    t277 277 "277"
+    t278 278 "278"
+    t279 279 "279"
+    t280 280 "280"
+    t281 281 "281"
+    t282 282 "282"
+    t283 283 "283"
+    t284 284 "284"
+    t285 285 "285"
+    t286 286 "286"
+    t287 287 "287"
+    t288 288 "288"
+    t289 289 "289"
+    t290 290 "290"
+    t291 291 "291"
+    t292 292 "292"
+    t293 293 "293"
+    t294 294 "294"
+    t295 295 "295"
+    t296 296 "296"
+    t297 297 "297"
+    t298 298 "298"
+    t299 299 "299"
+    t300 300 "300"
+    t301 301 "301"
+    t302 302 "302"
+    t303 303 "303"
+    t304 304 "304"
+    t305 305 "305"
+    t306 306 "306"
+    t307 307 "307"
+    t308 308 "308"
+    t309 309 "309"
+    t310 310 "310"
+    t311 311 "311"
+    t312 312 "312"
+    t313 313 "313"
+    t314 314 "314"
+    t315 315 "315"
+    t316 316 "316"
+    t317 317 "317"
+    t318 318 "318"
+    t319 319 "319"
+    t320 320 "320"
+    t321 321 "321"
+    t322 322 "322"
+    t323 323 "323"
+    t324 324 "324"
+    t325 325 "325"
+    t326 326 "326"
+    t327 327 "327"
+    t328 328 "328"
+    t329 329 "329"
+    t330 330 "330"
+    t331 331 "331"
+    t332 332 "332"
+    t333 333 "333"
+    t334 334 "334"
+    t335 335 "335"
+    t336 336 "336"
+    t337 337 "337"
+    t338 338 "338"
+    t339 339 "339"
+    t340 340 "340"
+    t341 341 "341"
+    t342 342 "342"
+    t343 343 "343"
+    t344 344 "344"
+    t345 345 "345"
+    t346 346 "346"
+    t347 347 "347"
+    t348 348 "348"
+    t349 349 "349"
+    t350 350 "350"
+    t351 351 "351"
+    t352 352 "352"
+    t353 353 "353"
+    t354 354 "354"
+    t355 355 "355"
+    t356 356 "356"
+    t357 357 "357"
+    t358 358 "358"
+    t359 359 "359"
+    t360 360 "360"
+    t361 361 "361"
+    t362 362 "362"
+    t363 363 "363"
+    t364 364 "364"
+    t365 365 "365"
+    t366 366 "366"
+    t367 367 "367"
+    t368 368 "368"
+    t369 369 "369"
+    t370 370 "370"
+    t371 371 "371"
+    t372 372 "372"
+    t373 373 "373"
+    t374 374 "374"
+    t375 375 "375"
+    t376 376 "376"
+    t377 377 "377"
+    t378 378 "378"
+    t379 379 "379"
+    t380 380 "380"
+    t381 381 "381"
+    t382 382 "382"
+    t383 383 "383"
+    t384 384 "384"
+    t385 385 "385"
+    t386 386 "386"
+    t387 387 "387"
+    t388 388 "388"
+    t389 389 "389"
+    t390 390 "390"
+    t391 391 "391"
+    t392 392 "392"
+    t393 393 "393"
+    t394 394 "394"
+    t395 395 "395"
+    t396 396 "396"
+    t397 397 "397"
+    t398 398 "398"
+    t399 399 "399"
+    t400 400 "400"
+    t401 401 "401"
+    t402 402 "402"
+    t403 403 "403"
+    t404 404 "404"
+    t405 405 "405"
+    t406 406 "406"
+    t407 407 "407"
+    t408 408 "408"
+    t409 409 "409"
+    t410 410 "410"
+    t411 411 "411"
+    t412 412 "412"
+    t413 413 "413"
+    t414 414 "414"
+    t415 415 "415"
+    t416 416 "416"
+    t417 417 "417"
+    t418 418 "418"
+    t419 419 "419"
+    t420 420 "420"
+    t421 421 "421"
+    t422 422 "422"
+    t423 423 "423"
+    t424 424 "424"
+    t425 425 "425"
+    t426 426 "426"
+    t427 427 "427"
+    t428 428 "428"
+    t429 429 "429"
+    t430 430 "430"
+    t431 431 "431"
+    t432 432 "432"
+    t433 433 "433"
+    t434 434 "434"
+    t435 435 "435"
+    t436 436 "436"
+    t437 437 "437"
+    t438 438 "438"
+    t439 439 "439"
+    t440 440 "440"
+    t441 441 "441"
+    t442 442 "442"
+    t443 443 "443"
+    t444 444 "444"
+    t445 445 "445"
+    t446 446 "446"
+    t447 447 "447"
+    t448 448 "448"
+    t449 449 "449"
+    t450 450 "450"
+    t451 451 "451"
+    t452 452 "452"
+    t453 453 "453"
+    t454 454 "454"
+    t455 455 "455"
+    t456 456 "456"
+    t457 457 "457"
+    t458 458 "458"
+    t459 459 "459"
+    t460 460 "460"
+    t461 461 "461"
+    t462 462 "462"
+    t463 463 "463"
+    t464 464 "464"
+    t465 465 "465"
+    t466 466 "466"
+    t467 467 "467"
+    t468 468 "468"
+    t469 469 "469"
+    t470 470 "470"
+    t471 471 "471"
+    t472 472 "472"
+    t473 473 "473"
+    t474 474 "474"
+    t475 475 "475"
+    t476 476 "476"
+    t477 477 "477"
+    t478 478 "478"
+    t479 479 "479"
+    t480 480 "480"
+    t481 481 "481"
+    t482 482 "482"
+    t483 483 "483"
+    t484 484 "484"
+    t485 485 "485"
+    t486 486 "486"
+    t487 487 "487"
+    t488 488 "488"
+    t489 489 "489"
+    t490 490 "490"
+    t491 491 "491"
+    t492 492 "492"
+    t493 493 "493"
+    t494 494 "494"
+    t495 495 "495"
+    t496 496 "496"
+    t497 497 "497"
+    t498 498 "498"
+    t499 499 "499"
+    t500 500 "500"
+    t501 501 "501"
+    t502 502 "502"
+    t503 503 "503"
+    t504 504 "504"
+    t505 505 "505"
+    t506 506 "506"
+    t507 507 "507"
+    t508 508 "508"
+    t509 509 "509"
+    t510 510 "510"
+    t511 511 "511"
+    t512 512 "512"
+    t513 513 "513"
+    t514 514 "514"
+    t515 515 "515"
+    t516 516 "516"
+    t517 517 "517"
+    t518 518 "518"
+    t519 519 "519"
+    t520 520 "520"
+    t521 521 "521"
+    t522 522 "522"
+    t523 523 "523"
+    t524 524 "524"
+    t525 525 "525"
+    t526 526 "526"
+    t527 527 "527"
+    t528 528 "528"
+    t529 529 "529"
+    t530 530 "530"
+    t531 531 "531"
+    t532 532 "532"
+    t533 533 "533"
+    t534 534 "534"
+    t535 535 "535"
+    t536 536 "536"
+    t537 537 "537"
+    t538 538 "538"
+    t539 539 "539"
+    t540 540 "540"
+    t541 541 "541"
+    t542 542 "542"
+    t543 543 "543"
+    t544 544 "544"
+    t545 545 "545"
+    t546 546 "546"
+    t547 547 "547"
+    t548 548 "548"
+    t549 549 "549"
+    t550 550 "550"
+    t551 551 "551"
+    t552 552 "552"
+    t553 553 "553"
+    t554 554 "554"
+    t555 555 "555"
+    t556 556 "556"
+    t557 557 "557"
+    t558 558 "558"
+    t559 559 "559"
+    t560 560 "560"
+    t561 561 "561"
+    t562 562 "562"
+    t563 563 "563"
+    t564 564 "564"
+    t565 565 "565"
+    t566 566 "566"
+    t567 567 "567"
+    t568 568 "568"
+    t569 569 "569"
+    t570 570 "570"
+    t571 571 "571"
+    t572 572 "572"
+    t573 573 "573"
+    t574 574 "574"
+    t575 575 "575"
+    t576 576 "576"
+    t577 577 "577"
+    t578 578 "578"
+    t579 579 "579"
+    t580 580 "580"
+    t581 581 "581"
+    t582 582 "582"
+    t583 583 "583"
+    t584 584 "584"
+    t585 585 "585"
+    t586 586 "586"
+    t587 587 "587"
+    t588 588 "588"
+    t589 589 "589"
+    t590 590 "590"
+    t591 591 "591"
+    t592 592 "592"
+    t593 593 "593"
+    t594 594 "594"
+    t595 595 "595"
+    t596 596 "596"
+    t597 597 "597"
+    t598 598 "598"
+    t599 599 "599"
+    t600 600 "600"
+    t601 601 "601"
+    t602 602 "602"
+    t603 603 "603"
+    t604 604 "604"
+    t605 605 "605"
+    t606 606 "606"
+    t607 607 "607"
+    t608 608 "608"
+    t609 609 "609"
+    t610 610 "610"
+    t611 611 "611"
+    t612 612 "612"
+    t613 613 "613"
+    t614 614 "614"
+    t615 615 "615"
+    t616 616 "616"
+    t617 617 "617"
+    t618 618 "618"
+    t619 619 "619"
+    t620 620 "620"
+    t621 621 "621"
+    t622 622 "622"
+    t623 623 "623"
+    t624 624 "624"
+    t625 625 "625"
+    t626 626 "626"
+    t627 627 "627"
+    t628 628 "628"
+    t629 629 "629"
+    t630 630 "630"
+    t631 631 "631"
+    t632 632 "632"
+    t633 633 "633"
+    t634 634 "634"
+    t635 635 "635"
+    t636 636 "636"
+    t637 637 "637"
+    t638 638 "638"
+    t639 639 "639"
+    t640 640 "640"
+    t641 641 "641"
+    t642 642 "642"
+    t643 643 "643"
+    t644 644 "644"
+    t645 645 "645"
+    t646 646 "646"
+    t647 647 "647"
+    t648 648 "648"
+    t649 649 "649"
+    t650 650 "650"
+    t651 651 "651"
+    t652 652 "652"
+    t653 653 "653"
+    t654 654 "654"
+    t655 655 "655"
+    t656 656 "656"
+    t657 657 "657"
+    t658 658 "658"
+    t659 659 "659"
+    t660 660 "660"
+    t661 661 "661"
+    t662 662 "662"
+    t663 663 "663"
+    t664 664 "664"
+    t665 665 "665"
+    t666 666 "666"
+    t667 667 "667"
+    t668 668 "668"
+    t669 669 "669"
+    t670 670 "670"
+    t671 671 "671"
+    t672 672 "672"
+    t673 673 "673"
+    t674 674 "674"
+    t675 675 "675"
+    t676 676 "676"
+    t677 677 "677"
+    t678 678 "678"
+    t679 679 "679"
+    t680 680 "680"
+    t681 681 "681"
+    t682 682 "682"
+    t683 683 "683"
+    t684 684 "684"
+    t685 685 "685"
+    t686 686 "686"
+    t687 687 "687"
+    t688 688 "688"
+    t689 689 "689"
+    t690 690 "690"
+    t691 691 "691"
+    t692 692 "692"
+    t693 693 "693"
+    t694 694 "694"
+    t695 695 "695"
+    t696 696 "696"
+    t697 697 "697"
+    t698 698 "698"
+    t699 699 "699"
+    t700 700 "700"
+    t701 701 "701"
+    t702 702 "702"
+    t703 703 "703"
+    t704 704 "704"
+    t705 705 "705"
+    t706 706 "706"
+    t707 707 "707"
+    t708 708 "708"
+    t709 709 "709"
+    t710 710 "710"
+    t711 711 "711"
+    t712 712 "712"
+    t713 713 "713"
+    t714 714 "714"
+    t715 715 "715"
+    t716 716 "716"
+    t717 717 "717"
+    t718 718 "718"
+    t719 719 "719"
+    t720 720 "720"
+    t721 721 "721"
+    t722 722 "722"
+    t723 723 "723"
+    t724 724 "724"
+    t725 725 "725"
+    t726 726 "726"
+    t727 727 "727"
+    t728 728 "728"
+    t729 729 "729"
+    t730 730 "730"
+    t731 731 "731"
+    t732 732 "732"
+    t733 733 "733"
+    t734 734 "734"
+    t735 735 "735"
+    t736 736 "736"
+    t737 737 "737"
+    t738 738 "738"
+    t739 739 "739"
+    t740 740 "740"
+    t741 741 "741"
+    t742 742 "742"
+    t743 743 "743"
+    t744 744 "744"
+    t745 745 "745"
+    t746 746 "746"
+    t747 747 "747"
+    t748 748 "748"
+    t749 749 "749"
+    t750 750 "750"
+    t751 751 "751"
+    t752 752 "752"
+    t753 753 "753"
+    t754 754 "754"
+    t755 755 "755"
+    t756 756 "756"
+    t757 757 "757"
+    t758 758 "758"
+    t759 759 "759"
+    t760 760 "760"
+    t761 761 "761"
+    t762 762 "762"
+    t763 763 "763"
+    t764 764 "764"
+    t765 765 "765"
+    t766 766 "766"
+    t767 767 "767"
+    t768 768 "768"
+    t769 769 "769"
+    t770 770 "770"
+    t771 771 "771"
+    t772 772 "772"
+    t773 773 "773"
+    t774 774 "774"
+    t775 775 "775"
+    t776 776 "776"
+    t777 777 "777"
+    t778 778 "778"
+    t779 779 "779"
+    t780 780 "780"
+    t781 781 "781"
+    t782 782 "782"
+    t783 783 "783"
+    t784 784 "784"
+    t785 785 "785"
+    t786 786 "786"
+    t787 787 "787"
+    t788 788 "788"
+    t789 789 "789"
+    t790 790 "790"
+    t791 791 "791"
+    t792 792 "792"
+    t793 793 "793"
+    t794 794 "794"
+    t795 795 "795"
+    t796 796 "796"
+    t797 797 "797"
+    t798 798 "798"
+    t799 799 "799"
+    t800 800 "800"
+    t801 801 "801"
+    t802 802 "802"
+    t803 803 "803"
+    t804 804 "804"
+    t805 805 "805"
+    t806 806 "806"
+    t807 807 "807"
+    t808 808 "808"
+    t809 809 "809"
+    t810 810 "810"
+    t811 811 "811"
+    t812 812 "812"
+    t813 813 "813"
+    t814 814 "814"
+    t815 815 "815"
+    t816 816 "816"
+    t817 817 "817"
+    t818 818 "818"
+    t819 819 "819"
+    t820 820 "820"
+    t821 821 "821"
+    t822 822 "822"
+    t823 823 "823"
+    t824 824 "824"
+    t825 825 "825"
+    t826 826 "826"
+    t827 827 "827"
+    t828 828 "828"
+    t829 829 "829"
+    t830 830 "830"
+    t831 831 "831"
+    t832 832 "832"
+    t833 833 "833"
+    t834 834 "834"
+    t835 835 "835"
+    t836 836 "836"
+    t837 837 "837"
+    t838 838 "838"
+    t839 839 "839"
+    t840 840 "840"
+    t841 841 "841"
+    t842 842 "842"
+    t843 843 "843"
+    t844 844 "844"
+    t845 845 "845"
+    t846 846 "846"
+    t847 847 "847"
+    t848 848 "848"
+    t849 849 "849"
+    t850 850 "850"
+    t851 851 "851"
+    t852 852 "852"
+    t853 853 "853"
+    t854 854 "854"
+    t855 855 "855"
+    t856 856 "856"
+    t857 857 "857"
+    t858 858 "858"
+    t859 859 "859"
+    t860 860 "860"
+    t861 861 "861"
+    t862 862 "862"
+    t863 863 "863"
+    t864 864 "864"
+    t865 865 "865"
+    t866 866 "866"
+    t867 867 "867"
+    t868 868 "868"
+    t869 869 "869"
+    t870 870 "870"
+    t871 871 "871"
+    t872 872 "872"
+    t873 873 "873"
+    t874 874 "874"
+    t875 875 "875"
+    t876 876 "876"
+    t877 877 "877"
+    t878 878 "878"
+    t879 879 "879"
+    t880 880 "880"
+    t881 881 "881"
+    t882 882 "882"
+    t883 883 "883"
+    t884 884 "884"
+    t885 885 "885"
+    t886 886 "886"
+    t887 887 "887"
+    t888 888 "888"
+    t889 889 "889"
+    t890 890 "890"
+    t891 891 "891"
+    t892 892 "892"
+    t893 893 "893"
+    t894 894 "894"
+    t895 895 "895"
+    t896 896 "896"
+    t897 897 "897"
+    t898 898 "898"
+    t899 899 "899"
+    t900 900 "900"
+    t901 901 "901"
+    t902 902 "902"
+    t903 903 "903"
+    t904 904 "904"
+    t905 905 "905"
+    t906 906 "906"
+    t907 907 "907"
+    t908 908 "908"
+    t909 909 "909"
+    t910 910 "910"
+    t911 911 "911"
+    t912 912 "912"
+    t913 913 "913"
+    t914 914 "914"
+    t915 915 "915"
+    t916 916 "916"
+    t917 917 "917"
+    t918 918 "918"
+    t919 919 "919"
+    t920 920 "920"
+    t921 921 "921"
+    t922 922 "922"
+    t923 923 "923"
+    t924 924 "924"
+    t925 925 "925"
+    t926 926 "926"
+    t927 927 "927"
+    t928 928 "928"
+    t929 929 "929"
+    t930 930 "930"
+    t931 931 "931"
+    t932 932 "932"
+    t933 933 "933"
+    t934 934 "934"
+    t935 935 "935"
+    t936 936 "936"
+    t937 937 "937"
+    t938 938 "938"
+    t939 939 "939"
+    t940 940 "940"
+    t941 941 "941"
+    t942 942 "942"
+    t943 943 "943"
+    t944 944 "944"
+    t945 945 "945"
+    t946 946 "946"
+    t947 947 "947"
+    t948 948 "948"
+    t949 949 "949"
+    t950 950 "950"
+    t951 951 "951"
+    t952 952 "952"
+    t953 953 "953"
+    t954 954 "954"
+    t955 955 "955"
+    t956 956 "956"
+    t957 957 "957"
+    t958 958 "958"
+    t959 959 "959"
+    t960 960 "960"
+    t961 961 "961"
+    t962 962 "962"
+    t963 963 "963"
+    t964 964 "964"
+    t965 965 "965"
+    t966 966 "966"
+    t967 967 "967"
+    t968 968 "968"
+    t969 969 "969"
+    t970 970 "970"
+    t971 971 "971"
+    t972 972 "972"
+    t973 973 "973"
+    t974 974 "974"
+    t975 975 "975"
+    t976 976 "976"
+    t977 977 "977"
+    t978 978 "978"
+    t979 979 "979"
+    t980 980 "980"
+    t981 981 "981"
+    t982 982 "982"
+    t983 983 "983"
+    t984 984 "984"
+    t985 985 "985"
+    t986 986 "986"
+    t987 987 "987"
+    t988 988 "988"
+    t989 989 "989"
+    t990 990 "990"
+    t991 991 "991"
+    t992 992 "992"
+    t993 993 "993"
+    t994 994 "994"
+    t995 995 "995"
+    t996 996 "996"
+    t997 997 "997"
+    t998 998 "998"
+    t999 999 "999"
+    t1000 1000 "1000"
+%%
+input:
+  exp        { assert ($1 == 1); $$ = $1; }
+| input exp  { assert ($2 == $1 + 1); $$ = $2; }
+;
+
+exp:
+  n1 "1" { assert ($1 == 1); $$ = $1; }
+| n2 "2" { assert ($1 == 2); $$ = $1; }
+| n3 "3" { assert ($1 == 3); $$ = $1; }
+| n4 "4" { assert ($1 == 4); $$ = $1; }
+| n5 "5" { assert ($1 == 5); $$ = $1; }
+| n6 "6" { assert ($1 == 6); $$ = $1; }
+| n7 "7" { assert ($1 == 7); $$ = $1; }
+| n8 "8" { assert ($1 == 8); $$ = $1; }
+| n9 "9" { assert ($1 == 9); $$ = $1; }
+| n10 "10" { assert ($1 == 10); $$ = $1; }
+| n11 "11" { assert ($1 == 11); $$ = $1; }
+| n12 "12" { assert ($1 == 12); $$ = $1; }
+| n13 "13" { assert ($1 == 13); $$ = $1; }
+| n14 "14" { assert ($1 == 14); $$ = $1; }
+| n15 "15" { assert ($1 == 15); $$ = $1; }
+| n16 "16" { assert ($1 == 16); $$ = $1; }
+| n17 "17" { assert ($1 == 17); $$ = $1; }
+| n18 "18" { assert ($1 == 18); $$ = $1; }
+| n19 "19" { assert ($1 == 19); $$ = $1; }
+| n20 "20" { assert ($1 == 20); $$ = $1; }
+| n21 "21" { assert ($1 == 21); $$ = $1; }
+| n22 "22" { assert ($1 == 22); $$ = $1; }
+| n23 "23" { assert ($1 == 23); $$ = $1; }
+| n24 "24" { assert ($1 == 24); $$ = $1; }
+| n25 "25" { assert ($1 == 25); $$ = $1; }
+| n26 "26" { assert ($1 == 26); $$ = $1; }
+| n27 "27" { assert ($1 == 27); $$ = $1; }
+| n28 "28" { assert ($1 == 28); $$ = $1; }
+| n29 "29" { assert ($1 == 29); $$ = $1; }
+| n30 "30" { assert ($1 == 30); $$ = $1; }
+| n31 "31" { assert ($1 == 31); $$ = $1; }
+| n32 "32" { assert ($1 == 32); $$ = $1; }
+| n33 "33" { assert ($1 == 33); $$ = $1; }
+| n34 "34" { assert ($1 == 34); $$ = $1; }
+| n35 "35" { assert ($1 == 35); $$ = $1; }
+| n36 "36" { assert ($1 == 36); $$ = $1; }
+| n37 "37" { assert ($1 == 37); $$ = $1; }
+| n38 "38" { assert ($1 == 38); $$ = $1; }
+| n39 "39" { assert ($1 == 39); $$ = $1; }
+| n40 "40" { assert ($1 == 40); $$ = $1; }
+| n41 "41" { assert ($1 == 41); $$ = $1; }
+| n42 "42" { assert ($1 == 42); $$ = $1; }
+| n43 "43" { assert ($1 == 43); $$ = $1; }
+| n44 "44" { assert ($1 == 44); $$ = $1; }
+| n45 "45" { assert ($1 == 45); $$ = $1; }
+| n46 "46" { assert ($1 == 46); $$ = $1; }
+| n47 "47" { assert ($1 == 47); $$ = $1; }
+| n48 "48" { assert ($1 == 48); $$ = $1; }
+| n49 "49" { assert ($1 == 49); $$ = $1; }
+| n50 "50" { assert ($1 == 50); $$ = $1; }
+| n51 "51" { assert ($1 == 51); $$ = $1; }
+| n52 "52" { assert ($1 == 52); $$ = $1; }
+| n53 "53" { assert ($1 == 53); $$ = $1; }
+| n54 "54" { assert ($1 == 54); $$ = $1; }
+| n55 "55" { assert ($1 == 55); $$ = $1; }
+| n56 "56" { assert ($1 == 56); $$ = $1; }
+| n57 "57" { assert ($1 == 57); $$ = $1; }
+| n58 "58" { assert ($1 == 58); $$ = $1; }
+| n59 "59" { assert ($1 == 59); $$ = $1; }
+| n60 "60" { assert ($1 == 60); $$ = $1; }
+| n61 "61" { assert ($1 == 61); $$ = $1; }
+| n62 "62" { assert ($1 == 62); $$ = $1; }
+| n63 "63" { assert ($1 == 63); $$ = $1; }
+| n64 "64" { assert ($1 == 64); $$ = $1; }
+| n65 "65" { assert ($1 == 65); $$ = $1; }
+| n66 "66" { assert ($1 == 66); $$ = $1; }
+| n67 "67" { assert ($1 == 67); $$ = $1; }
+| n68 "68" { assert ($1 == 68); $$ = $1; }
+| n69 "69" { assert ($1 == 69); $$ = $1; }
+| n70 "70" { assert ($1 == 70); $$ = $1; }
+| n71 "71" { assert ($1 == 71); $$ = $1; }
+| n72 "72" { assert ($1 == 72); $$ = $1; }
+| n73 "73" { assert ($1 == 73); $$ = $1; }
+| n74 "74" { assert ($1 == 74); $$ = $1; }
+| n75 "75" { assert ($1 == 75); $$ = $1; }
+| n76 "76" { assert ($1 == 76); $$ = $1; }
+| n77 "77" { assert ($1 == 77); $$ = $1; }
+| n78 "78" { assert ($1 == 78); $$ = $1; }
+| n79 "79" { assert ($1 == 79); $$ = $1; }
+| n80 "80" { assert ($1 == 80); $$ = $1; }
+| n81 "81" { assert ($1 == 81); $$ = $1; }
+| n82 "82" { assert ($1 == 82); $$ = $1; }
+| n83 "83" { assert ($1 == 83); $$ = $1; }
+| n84 "84" { assert ($1 == 84); $$ = $1; }
+| n85 "85" { assert ($1 == 85); $$ = $1; }
+| n86 "86" { assert ($1 == 86); $$ = $1; }
+| n87 "87" { assert ($1 == 87); $$ = $1; }
+| n88 "88" { assert ($1 == 88); $$ = $1; }
+| n89 "89" { assert ($1 == 89); $$ = $1; }
+| n90 "90" { assert ($1 == 90); $$ = $1; }
+| n91 "91" { assert ($1 == 91); $$ = $1; }
+| n92 "92" { assert ($1 == 92); $$ = $1; }
+| n93 "93" { assert ($1 == 93); $$ = $1; }
+| n94 "94" { assert ($1 == 94); $$ = $1; }
+| n95 "95" { assert ($1 == 95); $$ = $1; }
+| n96 "96" { assert ($1 == 96); $$ = $1; }
+| n97 "97" { assert ($1 == 97); $$ = $1; }
+| n98 "98" { assert ($1 == 98); $$ = $1; }
+| n99 "99" { assert ($1 == 99); $$ = $1; }
+| n100 "100" { assert ($1 == 100); $$ = $1; }
+| n101 "101" { assert ($1 == 101); $$ = $1; }
+| n102 "102" { assert ($1 == 102); $$ = $1; }
+| n103 "103" { assert ($1 == 103); $$ = $1; }
+| n104 "104" { assert ($1 == 104); $$ = $1; }
+| n105 "105" { assert ($1 == 105); $$ = $1; }
+| n106 "106" { assert ($1 == 106); $$ = $1; }
+| n107 "107" { assert ($1 == 107); $$ = $1; }
+| n108 "108" { assert ($1 == 108); $$ = $1; }
+| n109 "109" { assert ($1 == 109); $$ = $1; }
+| n110 "110" { assert ($1 == 110); $$ = $1; }
+| n111 "111" { assert ($1 == 111); $$ = $1; }
+| n112 "112" { assert ($1 == 112); $$ = $1; }
+| n113 "113" { assert ($1 == 113); $$ = $1; }
+| n114 "114" { assert ($1 == 114); $$ = $1; }
+| n115 "115" { assert ($1 == 115); $$ = $1; }
+| n116 "116" { assert ($1 == 116); $$ = $1; }
+| n117 "117" { assert ($1 == 117); $$ = $1; }
+| n118 "118" { assert ($1 == 118); $$ = $1; }
+| n119 "119" { assert ($1 == 119); $$ = $1; }
+| n120 "120" { assert ($1 == 120); $$ = $1; }
+| n121 "121" { assert ($1 == 121); $$ = $1; }
+| n122 "122" { assert ($1 == 122); $$ = $1; }
+| n123 "123" { assert ($1 == 123); $$ = $1; }
+| n124 "124" { assert ($1 == 124); $$ = $1; }
+| n125 "125" { assert ($1 == 125); $$ = $1; }
+| n126 "126" { assert ($1 == 126); $$ = $1; }
+| n127 "127" { assert ($1 == 127); $$ = $1; }
+| n128 "128" { assert ($1 == 128); $$ = $1; }
+| n129 "129" { assert ($1 == 129); $$ = $1; }
+| n130 "130" { assert ($1 == 130); $$ = $1; }
+| n131 "131" { assert ($1 == 131); $$ = $1; }
+| n132 "132" { assert ($1 == 132); $$ = $1; }
+| n133 "133" { assert ($1 == 133); $$ = $1; }
+| n134 "134" { assert ($1 == 134); $$ = $1; }
+| n135 "135" { assert ($1 == 135); $$ = $1; }
+| n136 "136" { assert ($1 == 136); $$ = $1; }
+| n137 "137" { assert ($1 == 137); $$ = $1; }
+| n138 "138" { assert ($1 == 138); $$ = $1; }
+| n139 "139" { assert ($1 == 139); $$ = $1; }
+| n140 "140" { assert ($1 == 140); $$ = $1; }
+| n141 "141" { assert ($1 == 141); $$ = $1; }
+| n142 "142" { assert ($1 == 142); $$ = $1; }
+| n143 "143" { assert ($1 == 143); $$ = $1; }
+| n144 "144" { assert ($1 == 144); $$ = $1; }
+| n145 "145" { assert ($1 == 145); $$ = $1; }
+| n146 "146" { assert ($1 == 146); $$ = $1; }
+| n147 "147" { assert ($1 == 147); $$ = $1; }
+| n148 "148" { assert ($1 == 148); $$ = $1; }
+| n149 "149" { assert ($1 == 149); $$ = $1; }
+| n150 "150" { assert ($1 == 150); $$ = $1; }
+| n151 "151" { assert ($1 == 151); $$ = $1; }
+| n152 "152" { assert ($1 == 152); $$ = $1; }
+| n153 "153" { assert ($1 == 153); $$ = $1; }
+| n154 "154" { assert ($1 == 154); $$ = $1; }
+| n155 "155" { assert ($1 == 155); $$ = $1; }
+| n156 "156" { assert ($1 == 156); $$ = $1; }
+| n157 "157" { assert ($1 == 157); $$ = $1; }
+| n158 "158" { assert ($1 == 158); $$ = $1; }
+| n159 "159" { assert ($1 == 159); $$ = $1; }
+| n160 "160" { assert ($1 == 160); $$ = $1; }
+| n161 "161" { assert ($1 == 161); $$ = $1; }
+| n162 "162" { assert ($1 == 162); $$ = $1; }
+| n163 "163" { assert ($1 == 163); $$ = $1; }
+| n164 "164" { assert ($1 == 164); $$ = $1; }
+| n165 "165" { assert ($1 == 165); $$ = $1; }
+| n166 "166" { assert ($1 == 166); $$ = $1; }
+| n167 "167" { assert ($1 == 167); $$ = $1; }
+| n168 "168" { assert ($1 == 168); $$ = $1; }
+| n169 "169" { assert ($1 == 169); $$ = $1; }
+| n170 "170" { assert ($1 == 170); $$ = $1; }
+| n171 "171" { assert ($1 == 171); $$ = $1; }
+| n172 "172" { assert ($1 == 172); $$ = $1; }
+| n173 "173" { assert ($1 == 173); $$ = $1; }
+| n174 "174" { assert ($1 == 174); $$ = $1; }
+| n175 "175" { assert ($1 == 175); $$ = $1; }
+| n176 "176" { assert ($1 == 176); $$ = $1; }
+| n177 "177" { assert ($1 == 177); $$ = $1; }
+| n178 "178" { assert ($1 == 178); $$ = $1; }
+| n179 "179" { assert ($1 == 179); $$ = $1; }
+| n180 "180" { assert ($1 == 180); $$ = $1; }
+| n181 "181" { assert ($1 == 181); $$ = $1; }
+| n182 "182" { assert ($1 == 182); $$ = $1; }
+| n183 "183" { assert ($1 == 183); $$ = $1; }
+| n184 "184" { assert ($1 == 184); $$ = $1; }
+| n185 "185" { assert ($1 == 185); $$ = $1; }
+| n186 "186" { assert ($1 == 186); $$ = $1; }
+| n187 "187" { assert ($1 == 187); $$ = $1; }
+| n188 "188" { assert ($1 == 188); $$ = $1; }
+| n189 "189" { assert ($1 == 189); $$ = $1; }
+| n190 "190" { assert ($1 == 190); $$ = $1; }
+| n191 "191" { assert ($1 == 191); $$ = $1; }
+| n192 "192" { assert ($1 == 192); $$ = $1; }
+| n193 "193" { assert ($1 == 193); $$ = $1; }
+| n194 "194" { assert ($1 == 194); $$ = $1; }
+| n195 "195" { assert ($1 == 195); $$ = $1; }
+| n196 "196" { assert ($1 == 196); $$ = $1; }
+| n197 "197" { assert ($1 == 197); $$ = $1; }
+| n198 "198" { assert ($1 == 198); $$ = $1; }
+| n199 "199" { assert ($1 == 199); $$ = $1; }
+| n200 "200" { assert ($1 == 200); $$ = $1; }
+| n201 "201" { assert ($1 == 201); $$ = $1; }
+| n202 "202" { assert ($1 == 202); $$ = $1; }
+| n203 "203" { assert ($1 == 203); $$ = $1; }
+| n204 "204" { assert ($1 == 204); $$ = $1; }
+| n205 "205" { assert ($1 == 205); $$ = $1; }
+| n206 "206" { assert ($1 == 206); $$ = $1; }
+| n207 "207" { assert ($1 == 207); $$ = $1; }
+| n208 "208" { assert ($1 == 208); $$ = $1; }
+| n209 "209" { assert ($1 == 209); $$ = $1; }
+| n210 "210" { assert ($1 == 210); $$ = $1; }
+| n211 "211" { assert ($1 == 211); $$ = $1; }
+| n212 "212" { assert ($1 == 212); $$ = $1; }
+| n213 "213" { assert ($1 == 213); $$ = $1; }
+| n214 "214" { assert ($1 == 214); $$ = $1; }
+| n215 "215" { assert ($1 == 215); $$ = $1; }
+| n216 "216" { assert ($1 == 216); $$ = $1; }
+| n217 "217" { assert ($1 == 217); $$ = $1; }
+| n218 "218" { assert ($1 == 218); $$ = $1; }
+| n219 "219" { assert ($1 == 219); $$ = $1; }
+| n220 "220" { assert ($1 == 220); $$ = $1; }
+| n221 "221" { assert ($1 == 221); $$ = $1; }
+| n222 "222" { assert ($1 == 222); $$ = $1; }
+| n223 "223" { assert ($1 == 223); $$ = $1; }
+| n224 "224" { assert ($1 == 224); $$ = $1; }
+| n225 "225" { assert ($1 == 225); $$ = $1; }
+| n226 "226" { assert ($1 == 226); $$ = $1; }
+| n227 "227" { assert ($1 == 227); $$ = $1; }
+| n228 "228" { assert ($1 == 228); $$ = $1; }
+| n229 "229" { assert ($1 == 229); $$ = $1; }
+| n230 "230" { assert ($1 == 230); $$ = $1; }
+| n231 "231" { assert ($1 == 231); $$ = $1; }
+| n232 "232" { assert ($1 == 232); $$ = $1; }
+| n233 "233" { assert ($1 == 233); $$ = $1; }
+| n234 "234" { assert ($1 == 234); $$ = $1; }
+| n235 "235" { assert ($1 == 235); $$ = $1; }
+| n236 "236" { assert ($1 == 236); $$ = $1; }
+| n237 "237" { assert ($1 == 237); $$ = $1; }
+| n238 "238" { assert ($1 == 238); $$ = $1; }
+| n239 "239" { assert ($1 == 239); $$ = $1; }
+| n240 "240" { assert ($1 == 240); $$ = $1; }
+| n241 "241" { assert ($1 == 241); $$ = $1; }
+| n242 "242" { assert ($1 == 242); $$ = $1; }
+| n243 "243" { assert ($1 == 243); $$ = $1; }
+| n244 "244" { assert ($1 == 244); $$ = $1; }
+| n245 "245" { assert ($1 == 245); $$ = $1; }
+| n246 "246" { assert ($1 == 246); $$ = $1; }
+| n247 "247" { assert ($1 == 247); $$ = $1; }
+| n248 "248" { assert ($1 == 248); $$ = $1; }
+| n249 "249" { assert ($1 == 249); $$ = $1; }
+| n250 "250" { assert ($1 == 250); $$ = $1; }
+| n251 "251" { assert ($1 == 251); $$ = $1; }
+| n252 "252" { assert ($1 == 252); $$ = $1; }
+| n253 "253" { assert ($1 == 253); $$ = $1; }
+| n254 "254" { assert ($1 == 254); $$ = $1; }
+| n255 "255" { assert ($1 == 255); $$ = $1; }
+| n256 "256" { assert ($1 == 256); $$ = $1; }
+| n257 "257" { assert ($1 == 257); $$ = $1; }
+| n258 "258" { assert ($1 == 258); $$ = $1; }
+| n259 "259" { assert ($1 == 259); $$ = $1; }
+| n260 "260" { assert ($1 == 260); $$ = $1; }
+| n261 "261" { assert ($1 == 261); $$ = $1; }
+| n262 "262" { assert ($1 == 262); $$ = $1; }
+| n263 "263" { assert ($1 == 263); $$ = $1; }
+| n264 "264" { assert ($1 == 264); $$ = $1; }
+| n265 "265" { assert ($1 == 265); $$ = $1; }
+| n266 "266" { assert ($1 == 266); $$ = $1; }
+| n267 "267" { assert ($1 == 267); $$ = $1; }
+| n268 "268" { assert ($1 == 268); $$ = $1; }
+| n269 "269" { assert ($1 == 269); $$ = $1; }
+| n270 "270" { assert ($1 == 270); $$ = $1; }
+| n271 "271" { assert ($1 == 271); $$ = $1; }
+| n272 "272" { assert ($1 == 272); $$ = $1; }
+| n273 "273" { assert ($1 == 273); $$ = $1; }
+| n274 "274" { assert ($1 == 274); $$ = $1; }
+| n275 "275" { assert ($1 == 275); $$ = $1; }
+| n276 "276" { assert ($1 == 276); $$ = $1; }
+| n277 "277" { assert ($1 == 277); $$ = $1; }
+| n278 "278" { assert ($1 == 278); $$ = $1; }
+| n279 "279" { assert ($1 == 279); $$ = $1; }
+| n280 "280" { assert ($1 == 280); $$ = $1; }
+| n281 "281" { assert ($1 == 281); $$ = $1; }
+| n282 "282" { assert ($1 == 282); $$ = $1; }
+| n283 "283" { assert ($1 == 283); $$ = $1; }
+| n284 "284" { assert ($1 == 284); $$ = $1; }
+| n285 "285" { assert ($1 == 285); $$ = $1; }
+| n286 "286" { assert ($1 == 286); $$ = $1; }
+| n287 "287" { assert ($1 == 287); $$ = $1; }
+| n288 "288" { assert ($1 == 288); $$ = $1; }
+| n289 "289" { assert ($1 == 289); $$ = $1; }
+| n290 "290" { assert ($1 == 290); $$ = $1; }
+| n291 "291" { assert ($1 == 291); $$ = $1; }
+| n292 "292" { assert ($1 == 292); $$ = $1; }
+| n293 "293" { assert ($1 == 293); $$ = $1; }
+| n294 "294" { assert ($1 == 294); $$ = $1; }
+| n295 "295" { assert ($1 == 295); $$ = $1; }
+| n296 "296" { assert ($1 == 296); $$ = $1; }
+| n297 "297" { assert ($1 == 297); $$ = $1; }
+| n298 "298" { assert ($1 == 298); $$ = $1; }
+| n299 "299" { assert ($1 == 299); $$ = $1; }
+| n300 "300" { assert ($1 == 300); $$ = $1; }
+| n301 "301" { assert ($1 == 301); $$ = $1; }
+| n302 "302" { assert ($1 == 302); $$ = $1; }
+| n303 "303" { assert ($1 == 303); $$ = $1; }
+| n304 "304" { assert ($1 == 304); $$ = $1; }
+| n305 "305" { assert ($1 == 305); $$ = $1; }
+| n306 "306" { assert ($1 == 306); $$ = $1; }
+| n307 "307" { assert ($1 == 307); $$ = $1; }
+| n308 "308" { assert ($1 == 308); $$ = $1; }
+| n309 "309" { assert ($1 == 309); $$ = $1; }
+| n310 "310" { assert ($1 == 310); $$ = $1; }
+| n311 "311" { assert ($1 == 311); $$ = $1; }
+| n312 "312" { assert ($1 == 312); $$ = $1; }
+| n313 "313" { assert ($1 == 313); $$ = $1; }
+| n314 "314" { assert ($1 == 314); $$ = $1; }
+| n315 "315" { assert ($1 == 315); $$ = $1; }
+| n316 "316" { assert ($1 == 316); $$ = $1; }
+| n317 "317" { assert ($1 == 317); $$ = $1; }
+| n318 "318" { assert ($1 == 318); $$ = $1; }
+| n319 "319" { assert ($1 == 319); $$ = $1; }
+| n320 "320" { assert ($1 == 320); $$ = $1; }
+| n321 "321" { assert ($1 == 321); $$ = $1; }
+| n322 "322" { assert ($1 == 322); $$ = $1; }
+| n323 "323" { assert ($1 == 323); $$ = $1; }
+| n324 "324" { assert ($1 == 324); $$ = $1; }
+| n325 "325" { assert ($1 == 325); $$ = $1; }
+| n326 "326" { assert ($1 == 326); $$ = $1; }
+| n327 "327" { assert ($1 == 327); $$ = $1; }
+| n328 "328" { assert ($1 == 328); $$ = $1; }
+| n329 "329" { assert ($1 == 329); $$ = $1; }
+| n330 "330" { assert ($1 == 330); $$ = $1; }
+| n331 "331" { assert ($1 == 331); $$ = $1; }
+| n332 "332" { assert ($1 == 332); $$ = $1; }
+| n333 "333" { assert ($1 == 333); $$ = $1; }
+| n334 "334" { assert ($1 == 334); $$ = $1; }
+| n335 "335" { assert ($1 == 335); $$ = $1; }
+| n336 "336" { assert ($1 == 336); $$ = $1; }
+| n337 "337" { assert ($1 == 337); $$ = $1; }
+| n338 "338" { assert ($1 == 338); $$ = $1; }
+| n339 "339" { assert ($1 == 339); $$ = $1; }
+| n340 "340" { assert ($1 == 340); $$ = $1; }
+| n341 "341" { assert ($1 == 341); $$ = $1; }
+| n342 "342" { assert ($1 == 342); $$ = $1; }
+| n343 "343" { assert ($1 == 343); $$ = $1; }
+| n344 "344" { assert ($1 == 344); $$ = $1; }
+| n345 "345" { assert ($1 == 345); $$ = $1; }
+| n346 "346" { assert ($1 == 346); $$ = $1; }
+| n347 "347" { assert ($1 == 347); $$ = $1; }
+| n348 "348" { assert ($1 == 348); $$ = $1; }
+| n349 "349" { assert ($1 == 349); $$ = $1; }
+| n350 "350" { assert ($1 == 350); $$ = $1; }
+| n351 "351" { assert ($1 == 351); $$ = $1; }
+| n352 "352" { assert ($1 == 352); $$ = $1; }
+| n353 "353" { assert ($1 == 353); $$ = $1; }
+| n354 "354" { assert ($1 == 354); $$ = $1; }
+| n355 "355" { assert ($1 == 355); $$ = $1; }
+| n356 "356" { assert ($1 == 356); $$ = $1; }
+| n357 "357" { assert ($1 == 357); $$ = $1; }
+| n358 "358" { assert ($1 == 358); $$ = $1; }
+| n359 "359" { assert ($1 == 359); $$ = $1; }
+| n360 "360" { assert ($1 == 360); $$ = $1; }
+| n361 "361" { assert ($1 == 361); $$ = $1; }
+| n362 "362" { assert ($1 == 362); $$ = $1; }
+| n363 "363" { assert ($1 == 363); $$ = $1; }
+| n364 "364" { assert ($1 == 364); $$ = $1; }
+| n365 "365" { assert ($1 == 365); $$ = $1; }
+| n366 "366" { assert ($1 == 366); $$ = $1; }
+| n367 "367" { assert ($1 == 367); $$ = $1; }
+| n368 "368" { assert ($1 == 368); $$ = $1; }
+| n369 "369" { assert ($1 == 369); $$ = $1; }
+| n370 "370" { assert ($1 == 370); $$ = $1; }
+| n371 "371" { assert ($1 == 371); $$ = $1; }
+| n372 "372" { assert ($1 == 372); $$ = $1; }
+| n373 "373" { assert ($1 == 373); $$ = $1; }
+| n374 "374" { assert ($1 == 374); $$ = $1; }
+| n375 "375" { assert ($1 == 375); $$ = $1; }
+| n376 "376" { assert ($1 == 376); $$ = $1; }
+| n377 "377" { assert ($1 == 377); $$ = $1; }
+| n378 "378" { assert ($1 == 378); $$ = $1; }
+| n379 "379" { assert ($1 == 379); $$ = $1; }
+| n380 "380" { assert ($1 == 380); $$ = $1; }
+| n381 "381" { assert ($1 == 381); $$ = $1; }
+| n382 "382" { assert ($1 == 382); $$ = $1; }
+| n383 "383" { assert ($1 == 383); $$ = $1; }
+| n384 "384" { assert ($1 == 384); $$ = $1; }
+| n385 "385" { assert ($1 == 385); $$ = $1; }
+| n386 "386" { assert ($1 == 386); $$ = $1; }
+| n387 "387" { assert ($1 == 387); $$ = $1; }
+| n388 "388" { assert ($1 == 388); $$ = $1; }
+| n389 "389" { assert ($1 == 389); $$ = $1; }
+| n390 "390" { assert ($1 == 390); $$ = $1; }
+| n391 "391" { assert ($1 == 391); $$ = $1; }
+| n392 "392" { assert ($1 == 392); $$ = $1; }
+| n393 "393" { assert ($1 == 393); $$ = $1; }
+| n394 "394" { assert ($1 == 394); $$ = $1; }
+| n395 "395" { assert ($1 == 395); $$ = $1; }
+| n396 "396" { assert ($1 == 396); $$ = $1; }
+| n397 "397" { assert ($1 == 397); $$ = $1; }
+| n398 "398" { assert ($1 == 398); $$ = $1; }
+| n399 "399" { assert ($1 == 399); $$ = $1; }
+| n400 "400" { assert ($1 == 400); $$ = $1; }
+| n401 "401" { assert ($1 == 401); $$ = $1; }
+| n402 "402" { assert ($1 == 402); $$ = $1; }
+| n403 "403" { assert ($1 == 403); $$ = $1; }
+| n404 "404" { assert ($1 == 404); $$ = $1; }
+| n405 "405" { assert ($1 == 405); $$ = $1; }
+| n406 "406" { assert ($1 == 406); $$ = $1; }
+| n407 "407" { assert ($1 == 407); $$ = $1; }
+| n408 "408" { assert ($1 == 408); $$ = $1; }
+| n409 "409" { assert ($1 == 409); $$ = $1; }
+| n410 "410" { assert ($1 == 410); $$ = $1; }
+| n411 "411" { assert ($1 == 411); $$ = $1; }
+| n412 "412" { assert ($1 == 412); $$ = $1; }
+| n413 "413" { assert ($1 == 413); $$ = $1; }
+| n414 "414" { assert ($1 == 414); $$ = $1; }
+| n415 "415" { assert ($1 == 415); $$ = $1; }
+| n416 "416" { assert ($1 == 416); $$ = $1; }
+| n417 "417" { assert ($1 == 417); $$ = $1; }
+| n418 "418" { assert ($1 == 418); $$ = $1; }
+| n419 "419" { assert ($1 == 419); $$ = $1; }
+| n420 "420" { assert ($1 == 420); $$ = $1; }
+| n421 "421" { assert ($1 == 421); $$ = $1; }
+| n422 "422" { assert ($1 == 422); $$ = $1; }
+| n423 "423" { assert ($1 == 423); $$ = $1; }
+| n424 "424" { assert ($1 == 424); $$ = $1; }
+| n425 "425" { assert ($1 == 425); $$ = $1; }
+| n426 "426" { assert ($1 == 426); $$ = $1; }
+| n427 "427" { assert ($1 == 427); $$ = $1; }
+| n428 "428" { assert ($1 == 428); $$ = $1; }
+| n429 "429" { assert ($1 == 429); $$ = $1; }
+| n430 "430" { assert ($1 == 430); $$ = $1; }
+| n431 "431" { assert ($1 == 431); $$ = $1; }
+| n432 "432" { assert ($1 == 432); $$ = $1; }
+| n433 "433" { assert ($1 == 433); $$ = $1; }
+| n434 "434" { assert ($1 == 434); $$ = $1; }
+| n435 "435" { assert ($1 == 435); $$ = $1; }
+| n436 "436" { assert ($1 == 436); $$ = $1; }
+| n437 "437" { assert ($1 == 437); $$ = $1; }
+| n438 "438" { assert ($1 == 438); $$ = $1; }
+| n439 "439" { assert ($1 == 439); $$ = $1; }
+| n440 "440" { assert ($1 == 440); $$ = $1; }
+| n441 "441" { assert ($1 == 441); $$ = $1; }
+| n442 "442" { assert ($1 == 442); $$ = $1; }
+| n443 "443" { assert ($1 == 443); $$ = $1; }
+| n444 "444" { assert ($1 == 444); $$ = $1; }
+| n445 "445" { assert ($1 == 445); $$ = $1; }
+| n446 "446" { assert ($1 == 446); $$ = $1; }
+| n447 "447" { assert ($1 == 447); $$ = $1; }
+| n448 "448" { assert ($1 == 448); $$ = $1; }
+| n449 "449" { assert ($1 == 449); $$ = $1; }
+| n450 "450" { assert ($1 == 450); $$ = $1; }
+| n451 "451" { assert ($1 == 451); $$ = $1; }
+| n452 "452" { assert ($1 == 452); $$ = $1; }
+| n453 "453" { assert ($1 == 453); $$ = $1; }
+| n454 "454" { assert ($1 == 454); $$ = $1; }
+| n455 "455" { assert ($1 == 455); $$ = $1; }
+| n456 "456" { assert ($1 == 456); $$ = $1; }
+| n457 "457" { assert ($1 == 457); $$ = $1; }
+| n458 "458" { assert ($1 == 458); $$ = $1; }
+| n459 "459" { assert ($1 == 459); $$ = $1; }
+| n460 "460" { assert ($1 == 460); $$ = $1; }
+| n461 "461" { assert ($1 == 461); $$ = $1; }
+| n462 "462" { assert ($1 == 462); $$ = $1; }
+| n463 "463" { assert ($1 == 463); $$ = $1; }
+| n464 "464" { assert ($1 == 464); $$ = $1; }
+| n465 "465" { assert ($1 == 465); $$ = $1; }
+| n466 "466" { assert ($1 == 466); $$ = $1; }
+| n467 "467" { assert ($1 == 467); $$ = $1; }
+| n468 "468" { assert ($1 == 468); $$ = $1; }
+| n469 "469" { assert ($1 == 469); $$ = $1; }
+| n470 "470" { assert ($1 == 470); $$ = $1; }
+| n471 "471" { assert ($1 == 471); $$ = $1; }
+| n472 "472" { assert ($1 == 472); $$ = $1; }
+| n473 "473" { assert ($1 == 473); $$ = $1; }
+| n474 "474" { assert ($1 == 474); $$ = $1; }
+| n475 "475" { assert ($1 == 475); $$ = $1; }
+| n476 "476" { assert ($1 == 476); $$ = $1; }
+| n477 "477" { assert ($1 == 477); $$ = $1; }
+| n478 "478" { assert ($1 == 478); $$ = $1; }
+| n479 "479" { assert ($1 == 479); $$ = $1; }
+| n480 "480" { assert ($1 == 480); $$ = $1; }
+| n481 "481" { assert ($1 == 481); $$ = $1; }
+| n482 "482" { assert ($1 == 482); $$ = $1; }
+| n483 "483" { assert ($1 == 483); $$ = $1; }
+| n484 "484" { assert ($1 == 484); $$ = $1; }
+| n485 "485" { assert ($1 == 485); $$ = $1; }
+| n486 "486" { assert ($1 == 486); $$ = $1; }
+| n487 "487" { assert ($1 == 487); $$ = $1; }
+| n488 "488" { assert ($1 == 488); $$ = $1; }
+| n489 "489" { assert ($1 == 489); $$ = $1; }
+| n490 "490" { assert ($1 == 490); $$ = $1; }
+| n491 "491" { assert ($1 == 491); $$ = $1; }
+| n492 "492" { assert ($1 == 492); $$ = $1; }
+| n493 "493" { assert ($1 == 493); $$ = $1; }
+| n494 "494" { assert ($1 == 494); $$ = $1; }
+| n495 "495" { assert ($1 == 495); $$ = $1; }
+| n496 "496" { assert ($1 == 496); $$ = $1; }
+| n497 "497" { assert ($1 == 497); $$ = $1; }
+| n498 "498" { assert ($1 == 498); $$ = $1; }
+| n499 "499" { assert ($1 == 499); $$ = $1; }
+| n500 "500" { assert ($1 == 500); $$ = $1; }
+| n501 "501" { assert ($1 == 501); $$ = $1; }
+| n502 "502" { assert ($1 == 502); $$ = $1; }
+| n503 "503" { assert ($1 == 503); $$ = $1; }
+| n504 "504" { assert ($1 == 504); $$ = $1; }
+| n505 "505" { assert ($1 == 505); $$ = $1; }
+| n506 "506" { assert ($1 == 506); $$ = $1; }
+| n507 "507" { assert ($1 == 507); $$ = $1; }
+| n508 "508" { assert ($1 == 508); $$ = $1; }
+| n509 "509" { assert ($1 == 509); $$ = $1; }
+| n510 "510" { assert ($1 == 510); $$ = $1; }
+| n511 "511" { assert ($1 == 511); $$ = $1; }
+| n512 "512" { assert ($1 == 512); $$ = $1; }
+| n513 "513" { assert ($1 == 513); $$ = $1; }
+| n514 "514" { assert ($1 == 514); $$ = $1; }
+| n515 "515" { assert ($1 == 515); $$ = $1; }
+| n516 "516" { assert ($1 == 516); $$ = $1; }
+| n517 "517" { assert ($1 == 517); $$ = $1; }
+| n518 "518" { assert ($1 == 518); $$ = $1; }
+| n519 "519" { assert ($1 == 519); $$ = $1; }
+| n520 "520" { assert ($1 == 520); $$ = $1; }
+| n521 "521" { assert ($1 == 521); $$ = $1; }
+| n522 "522" { assert ($1 == 522); $$ = $1; }
+| n523 "523" { assert ($1 == 523); $$ = $1; }
+| n524 "524" { assert ($1 == 524); $$ = $1; }
+| n525 "525" { assert ($1 == 525); $$ = $1; }
+| n526 "526" { assert ($1 == 526); $$ = $1; }
+| n527 "527" { assert ($1 == 527); $$ = $1; }
+| n528 "528" { assert ($1 == 528); $$ = $1; }
+| n529 "529" { assert ($1 == 529); $$ = $1; }
+| n530 "530" { assert ($1 == 530); $$ = $1; }
+| n531 "531" { assert ($1 == 531); $$ = $1; }
+| n532 "532" { assert ($1 == 532); $$ = $1; }
+| n533 "533" { assert ($1 == 533); $$ = $1; }
+| n534 "534" { assert ($1 == 534); $$ = $1; }
+| n535 "535" { assert ($1 == 535); $$ = $1; }
+| n536 "536" { assert ($1 == 536); $$ = $1; }
+| n537 "537" { assert ($1 == 537); $$ = $1; }
+| n538 "538" { assert ($1 == 538); $$ = $1; }
+| n539 "539" { assert ($1 == 539); $$ = $1; }
+| n540 "540" { assert ($1 == 540); $$ = $1; }
+| n541 "541" { assert ($1 == 541); $$ = $1; }
+| n542 "542" { assert ($1 == 542); $$ = $1; }
+| n543 "543" { assert ($1 == 543); $$ = $1; }
+| n544 "544" { assert ($1 == 544); $$ = $1; }
+| n545 "545" { assert ($1 == 545); $$ = $1; }
+| n546 "546" { assert ($1 == 546); $$ = $1; }
+| n547 "547" { assert ($1 == 547); $$ = $1; }
+| n548 "548" { assert ($1 == 548); $$ = $1; }
+| n549 "549" { assert ($1 == 549); $$ = $1; }
+| n550 "550" { assert ($1 == 550); $$ = $1; }
+| n551 "551" { assert ($1 == 551); $$ = $1; }
+| n552 "552" { assert ($1 == 552); $$ = $1; }
+| n553 "553" { assert ($1 == 553); $$ = $1; }
+| n554 "554" { assert ($1 == 554); $$ = $1; }
+| n555 "555" { assert ($1 == 555); $$ = $1; }
+| n556 "556" { assert ($1 == 556); $$ = $1; }
+| n557 "557" { assert ($1 == 557); $$ = $1; }
+| n558 "558" { assert ($1 == 558); $$ = $1; }
+| n559 "559" { assert ($1 == 559); $$ = $1; }
+| n560 "560" { assert ($1 == 560); $$ = $1; }
+| n561 "561" { assert ($1 == 561); $$ = $1; }
+| n562 "562" { assert ($1 == 562); $$ = $1; }
+| n563 "563" { assert ($1 == 563); $$ = $1; }
+| n564 "564" { assert ($1 == 564); $$ = $1; }
+| n565 "565" { assert ($1 == 565); $$ = $1; }
+| n566 "566" { assert ($1 == 566); $$ = $1; }
+| n567 "567" { assert ($1 == 567); $$ = $1; }
+| n568 "568" { assert ($1 == 568); $$ = $1; }
+| n569 "569" { assert ($1 == 569); $$ = $1; }
+| n570 "570" { assert ($1 == 570); $$ = $1; }
+| n571 "571" { assert ($1 == 571); $$ = $1; }
+| n572 "572" { assert ($1 == 572); $$ = $1; }
+| n573 "573" { assert ($1 == 573); $$ = $1; }
+| n574 "574" { assert ($1 == 574); $$ = $1; }
+| n575 "575" { assert ($1 == 575); $$ = $1; }
+| n576 "576" { assert ($1 == 576); $$ = $1; }
+| n577 "577" { assert ($1 == 577); $$ = $1; }
+| n578 "578" { assert ($1 == 578); $$ = $1; }
+| n579 "579" { assert ($1 == 579); $$ = $1; }
+| n580 "580" { assert ($1 == 580); $$ = $1; }
+| n581 "581" { assert ($1 == 581); $$ = $1; }
+| n582 "582" { assert ($1 == 582); $$ = $1; }
+| n583 "583" { assert ($1 == 583); $$ = $1; }
+| n584 "584" { assert ($1 == 584); $$ = $1; }
+| n585 "585" { assert ($1 == 585); $$ = $1; }
+| n586 "586" { assert ($1 == 586); $$ = $1; }
+| n587 "587" { assert ($1 == 587); $$ = $1; }
+| n588 "588" { assert ($1 == 588); $$ = $1; }
+| n589 "589" { assert ($1 == 589); $$ = $1; }
+| n590 "590" { assert ($1 == 590); $$ = $1; }
+| n591 "591" { assert ($1 == 591); $$ = $1; }
+| n592 "592" { assert ($1 == 592); $$ = $1; }
+| n593 "593" { assert ($1 == 593); $$ = $1; }
+| n594 "594" { assert ($1 == 594); $$ = $1; }
+| n595 "595" { assert ($1 == 595); $$ = $1; }
+| n596 "596" { assert ($1 == 596); $$ = $1; }
+| n597 "597" { assert ($1 == 597); $$ = $1; }
+| n598 "598" { assert ($1 == 598); $$ = $1; }
+| n599 "599" { assert ($1 == 599); $$ = $1; }
+| n600 "600" { assert ($1 == 600); $$ = $1; }
+| n601 "601" { assert ($1 == 601); $$ = $1; }
+| n602 "602" { assert ($1 == 602); $$ = $1; }
+| n603 "603" { assert ($1 == 603); $$ = $1; }
+| n604 "604" { assert ($1 == 604); $$ = $1; }
+| n605 "605" { assert ($1 == 605); $$ = $1; }
+| n606 "606" { assert ($1 == 606); $$ = $1; }
+| n607 "607" { assert ($1 == 607); $$ = $1; }
+| n608 "608" { assert ($1 == 608); $$ = $1; }
+| n609 "609" { assert ($1 == 609); $$ = $1; }
+| n610 "610" { assert ($1 == 610); $$ = $1; }
+| n611 "611" { assert ($1 == 611); $$ = $1; }
+| n612 "612" { assert ($1 == 612); $$ = $1; }
+| n613 "613" { assert ($1 == 613); $$ = $1; }
+| n614 "614" { assert ($1 == 614); $$ = $1; }
+| n615 "615" { assert ($1 == 615); $$ = $1; }
+| n616 "616" { assert ($1 == 616); $$ = $1; }
+| n617 "617" { assert ($1 == 617); $$ = $1; }
+| n618 "618" { assert ($1 == 618); $$ = $1; }
+| n619 "619" { assert ($1 == 619); $$ = $1; }
+| n620 "620" { assert ($1 == 620); $$ = $1; }
+| n621 "621" { assert ($1 == 621); $$ = $1; }
+| n622 "622" { assert ($1 == 622); $$ = $1; }
+| n623 "623" { assert ($1 == 623); $$ = $1; }
+| n624 "624" { assert ($1 == 624); $$ = $1; }
+| n625 "625" { assert ($1 == 625); $$ = $1; }
+| n626 "626" { assert ($1 == 626); $$ = $1; }
+| n627 "627" { assert ($1 == 627); $$ = $1; }
+| n628 "628" { assert ($1 == 628); $$ = $1; }
+| n629 "629" { assert ($1 == 629); $$ = $1; }
+| n630 "630" { assert ($1 == 630); $$ = $1; }
+| n631 "631" { assert ($1 == 631); $$ = $1; }
+| n632 "632" { assert ($1 == 632); $$ = $1; }
+| n633 "633" { assert ($1 == 633); $$ = $1; }
+| n634 "634" { assert ($1 == 634); $$ = $1; }
+| n635 "635" { assert ($1 == 635); $$ = $1; }
+| n636 "636" { assert ($1 == 636); $$ = $1; }
+| n637 "637" { assert ($1 == 637); $$ = $1; }
+| n638 "638" { assert ($1 == 638); $$ = $1; }
+| n639 "639" { assert ($1 == 639); $$ = $1; }
+| n640 "640" { assert ($1 == 640); $$ = $1; }
+| n641 "641" { assert ($1 == 641); $$ = $1; }
+| n642 "642" { assert ($1 == 642); $$ = $1; }
+| n643 "643" { assert ($1 == 643); $$ = $1; }
+| n644 "644" { assert ($1 == 644); $$ = $1; }
+| n645 "645" { assert ($1 == 645); $$ = $1; }
+| n646 "646" { assert ($1 == 646); $$ = $1; }
+| n647 "647" { assert ($1 == 647); $$ = $1; }
+| n648 "648" { assert ($1 == 648); $$ = $1; }
+| n649 "649" { assert ($1 == 649); $$ = $1; }
+| n650 "650" { assert ($1 == 650); $$ = $1; }
+| n651 "651" { assert ($1 == 651); $$ = $1; }
+| n652 "652" { assert ($1 == 652); $$ = $1; }
+| n653 "653" { assert ($1 == 653); $$ = $1; }
+| n654 "654" { assert ($1 == 654); $$ = $1; }
+| n655 "655" { assert ($1 == 655); $$ = $1; }
+| n656 "656" { assert ($1 == 656); $$ = $1; }
+| n657 "657" { assert ($1 == 657); $$ = $1; }
+| n658 "658" { assert ($1 == 658); $$ = $1; }
+| n659 "659" { assert ($1 == 659); $$ = $1; }
+| n660 "660" { assert ($1 == 660); $$ = $1; }
+| n661 "661" { assert ($1 == 661); $$ = $1; }
+| n662 "662" { assert ($1 == 662); $$ = $1; }
+| n663 "663" { assert ($1 == 663); $$ = $1; }
+| n664 "664" { assert ($1 == 664); $$ = $1; }
+| n665 "665" { assert ($1 == 665); $$ = $1; }
+| n666 "666" { assert ($1 == 666); $$ = $1; }
+| n667 "667" { assert ($1 == 667); $$ = $1; }
+| n668 "668" { assert ($1 == 668); $$ = $1; }
+| n669 "669" { assert ($1 == 669); $$ = $1; }
+| n670 "670" { assert ($1 == 670); $$ = $1; }
+| n671 "671" { assert ($1 == 671); $$ = $1; }
+| n672 "672" { assert ($1 == 672); $$ = $1; }
+| n673 "673" { assert ($1 == 673); $$ = $1; }
+| n674 "674" { assert ($1 == 674); $$ = $1; }
+| n675 "675" { assert ($1 == 675); $$ = $1; }
+| n676 "676" { assert ($1 == 676); $$ = $1; }
+| n677 "677" { assert ($1 == 677); $$ = $1; }
+| n678 "678" { assert ($1 == 678); $$ = $1; }
+| n679 "679" { assert ($1 == 679); $$ = $1; }
+| n680 "680" { assert ($1 == 680); $$ = $1; }
+| n681 "681" { assert ($1 == 681); $$ = $1; }
+| n682 "682" { assert ($1 == 682); $$ = $1; }
+| n683 "683" { assert ($1 == 683); $$ = $1; }
+| n684 "684" { assert ($1 == 684); $$ = $1; }
+| n685 "685" { assert ($1 == 685); $$ = $1; }
+| n686 "686" { assert ($1 == 686); $$ = $1; }
+| n687 "687" { assert ($1 == 687); $$ = $1; }
+| n688 "688" { assert ($1 == 688); $$ = $1; }
+| n689 "689" { assert ($1 == 689); $$ = $1; }
+| n690 "690" { assert ($1 == 690); $$ = $1; }
+| n691 "691" { assert ($1 == 691); $$ = $1; }
+| n692 "692" { assert ($1 == 692); $$ = $1; }
+| n693 "693" { assert ($1 == 693); $$ = $1; }
+| n694 "694" { assert ($1 == 694); $$ = $1; }
+| n695 "695" { assert ($1 == 695); $$ = $1; }
+| n696 "696" { assert ($1 == 696); $$ = $1; }
+| n697 "697" { assert ($1 == 697); $$ = $1; }
+| n698 "698" { assert ($1 == 698); $$ = $1; }
+| n699 "699" { assert ($1 == 699); $$ = $1; }
+| n700 "700" { assert ($1 == 700); $$ = $1; }
+| n701 "701" { assert ($1 == 701); $$ = $1; }
+| n702 "702" { assert ($1 == 702); $$ = $1; }
+| n703 "703" { assert ($1 == 703); $$ = $1; }
+| n704 "704" { assert ($1 == 704); $$ = $1; }
+| n705 "705" { assert ($1 == 705); $$ = $1; }
+| n706 "706" { assert ($1 == 706); $$ = $1; }
+| n707 "707" { assert ($1 == 707); $$ = $1; }
+| n708 "708" { assert ($1 == 708); $$ = $1; }
+| n709 "709" { assert ($1 == 709); $$ = $1; }
+| n710 "710" { assert ($1 == 710); $$ = $1; }
+| n711 "711" { assert ($1 == 711); $$ = $1; }
+| n712 "712" { assert ($1 == 712); $$ = $1; }
+| n713 "713" { assert ($1 == 713); $$ = $1; }
+| n714 "714" { assert ($1 == 714); $$ = $1; }
+| n715 "715" { assert ($1 == 715); $$ = $1; }
+| n716 "716" { assert ($1 == 716); $$ = $1; }
+| n717 "717" { assert ($1 == 717); $$ = $1; }
+| n718 "718" { assert ($1 == 718); $$ = $1; }
+| n719 "719" { assert ($1 == 719); $$ = $1; }
+| n720 "720" { assert ($1 == 720); $$ = $1; }
+| n721 "721" { assert ($1 == 721); $$ = $1; }
+| n722 "722" { assert ($1 == 722); $$ = $1; }
+| n723 "723" { assert ($1 == 723); $$ = $1; }
+| n724 "724" { assert ($1 == 724); $$ = $1; }
+| n725 "725" { assert ($1 == 725); $$ = $1; }
+| n726 "726" { assert ($1 == 726); $$ = $1; }
+| n727 "727" { assert ($1 == 727); $$ = $1; }
+| n728 "728" { assert ($1 == 728); $$ = $1; }
+| n729 "729" { assert ($1 == 729); $$ = $1; }
+| n730 "730" { assert ($1 == 730); $$ = $1; }
+| n731 "731" { assert ($1 == 731); $$ = $1; }
+| n732 "732" { assert ($1 == 732); $$ = $1; }
+| n733 "733" { assert ($1 == 733); $$ = $1; }
+| n734 "734" { assert ($1 == 734); $$ = $1; }
+| n735 "735" { assert ($1 == 735); $$ = $1; }
+| n736 "736" { assert ($1 == 736); $$ = $1; }
+| n737 "737" { assert ($1 == 737); $$ = $1; }
+| n738 "738" { assert ($1 == 738); $$ = $1; }
+| n739 "739" { assert ($1 == 739); $$ = $1; }
+| n740 "740" { assert ($1 == 740); $$ = $1; }
+| n741 "741" { assert ($1 == 741); $$ = $1; }
+| n742 "742" { assert ($1 == 742); $$ = $1; }
+| n743 "743" { assert ($1 == 743); $$ = $1; }
+| n744 "744" { assert ($1 == 744); $$ = $1; }
+| n745 "745" { assert ($1 == 745); $$ = $1; }
+| n746 "746" { assert ($1 == 746); $$ = $1; }
+| n747 "747" { assert ($1 == 747); $$ = $1; }
+| n748 "748" { assert ($1 == 748); $$ = $1; }
+| n749 "749" { assert ($1 == 749); $$ = $1; }
+| n750 "750" { assert ($1 == 750); $$ = $1; }
+| n751 "751" { assert ($1 == 751); $$ = $1; }
+| n752 "752" { assert ($1 == 752); $$ = $1; }
+| n753 "753" { assert ($1 == 753); $$ = $1; }
+| n754 "754" { assert ($1 == 754); $$ = $1; }
+| n755 "755" { assert ($1 == 755); $$ = $1; }
+| n756 "756" { assert ($1 == 756); $$ = $1; }
+| n757 "757" { assert ($1 == 757); $$ = $1; }
+| n758 "758" { assert ($1 == 758); $$ = $1; }
+| n759 "759" { assert ($1 == 759); $$ = $1; }
+| n760 "760" { assert ($1 == 760); $$ = $1; }
+| n761 "761" { assert ($1 == 761); $$ = $1; }
+| n762 "762" { assert ($1 == 762); $$ = $1; }
+| n763 "763" { assert ($1 == 763); $$ = $1; }
+| n764 "764" { assert ($1 == 764); $$ = $1; }
+| n765 "765" { assert ($1 == 765); $$ = $1; }
+| n766 "766" { assert ($1 == 766); $$ = $1; }
+| n767 "767" { assert ($1 == 767); $$ = $1; }
+| n768 "768" { assert ($1 == 768); $$ = $1; }
+| n769 "769" { assert ($1 == 769); $$ = $1; }
+| n770 "770" { assert ($1 == 770); $$ = $1; }
+| n771 "771" { assert ($1 == 771); $$ = $1; }
+| n772 "772" { assert ($1 == 772); $$ = $1; }
+| n773 "773" { assert ($1 == 773); $$ = $1; }
+| n774 "774" { assert ($1 == 774); $$ = $1; }
+| n775 "775" { assert ($1 == 775); $$ = $1; }
+| n776 "776" { assert ($1 == 776); $$ = $1; }
+| n777 "777" { assert ($1 == 777); $$ = $1; }
+| n778 "778" { assert ($1 == 778); $$ = $1; }
+| n779 "779" { assert ($1 == 779); $$ = $1; }
+| n780 "780" { assert ($1 == 780); $$ = $1; }
+| n781 "781" { assert ($1 == 781); $$ = $1; }
+| n782 "782" { assert ($1 == 782); $$ = $1; }
+| n783 "783" { assert ($1 == 783); $$ = $1; }
+| n784 "784" { assert ($1 == 784); $$ = $1; }
+| n785 "785" { assert ($1 == 785); $$ = $1; }
+| n786 "786" { assert ($1 == 786); $$ = $1; }
+| n787 "787" { assert ($1 == 787); $$ = $1; }
+| n788 "788" { assert ($1 == 788); $$ = $1; }
+| n789 "789" { assert ($1 == 789); $$ = $1; }
+| n790 "790" { assert ($1 == 790); $$ = $1; }
+| n791 "791" { assert ($1 == 791); $$ = $1; }
+| n792 "792" { assert ($1 == 792); $$ = $1; }
+| n793 "793" { assert ($1 == 793); $$ = $1; }
+| n794 "794" { assert ($1 == 794); $$ = $1; }
+| n795 "795" { assert ($1 == 795); $$ = $1; }
+| n796 "796" { assert ($1 == 796); $$ = $1; }
+| n797 "797" { assert ($1 == 797); $$ = $1; }
+| n798 "798" { assert ($1 == 798); $$ = $1; }
+| n799 "799" { assert ($1 == 799); $$ = $1; }
+| n800 "800" { assert ($1 == 800); $$ = $1; }
+| n801 "801" { assert ($1 == 801); $$ = $1; }
+| n802 "802" { assert ($1 == 802); $$ = $1; }
+| n803 "803" { assert ($1 == 803); $$ = $1; }
+| n804 "804" { assert ($1 == 804); $$ = $1; }
+| n805 "805" { assert ($1 == 805); $$ = $1; }
+| n806 "806" { assert ($1 == 806); $$ = $1; }
+| n807 "807" { assert ($1 == 807); $$ = $1; }
+| n808 "808" { assert ($1 == 808); $$ = $1; }
+| n809 "809" { assert ($1 == 809); $$ = $1; }
+| n810 "810" { assert ($1 == 810); $$ = $1; }
+| n811 "811" { assert ($1 == 811); $$ = $1; }
+| n812 "812" { assert ($1 == 812); $$ = $1; }
+| n813 "813" { assert ($1 == 813); $$ = $1; }
+| n814 "814" { assert ($1 == 814); $$ = $1; }
+| n815 "815" { assert ($1 == 815); $$ = $1; }
+| n816 "816" { assert ($1 == 816); $$ = $1; }
+| n817 "817" { assert ($1 == 817); $$ = $1; }
+| n818 "818" { assert ($1 == 818); $$ = $1; }
+| n819 "819" { assert ($1 == 819); $$ = $1; }
+| n820 "820" { assert ($1 == 820); $$ = $1; }
+| n821 "821" { assert ($1 == 821); $$ = $1; }
+| n822 "822" { assert ($1 == 822); $$ = $1; }
+| n823 "823" { assert ($1 == 823); $$ = $1; }
+| n824 "824" { assert ($1 == 824); $$ = $1; }
+| n825 "825" { assert ($1 == 825); $$ = $1; }
+| n826 "826" { assert ($1 == 826); $$ = $1; }
+| n827 "827" { assert ($1 == 827); $$ = $1; }
+| n828 "828" { assert ($1 == 828); $$ = $1; }
+| n829 "829" { assert ($1 == 829); $$ = $1; }
+| n830 "830" { assert ($1 == 830); $$ = $1; }
+| n831 "831" { assert ($1 == 831); $$ = $1; }
+| n832 "832" { assert ($1 == 832); $$ = $1; }
+| n833 "833" { assert ($1 == 833); $$ = $1; }
+| n834 "834" { assert ($1 == 834); $$ = $1; }
+| n835 "835" { assert ($1 == 835); $$ = $1; }
+| n836 "836" { assert ($1 == 836); $$ = $1; }
+| n837 "837" { assert ($1 == 837); $$ = $1; }
+| n838 "838" { assert ($1 == 838); $$ = $1; }
+| n839 "839" { assert ($1 == 839); $$ = $1; }
+| n840 "840" { assert ($1 == 840); $$ = $1; }
+| n841 "841" { assert ($1 == 841); $$ = $1; }
+| n842 "842" { assert ($1 == 842); $$ = $1; }
+| n843 "843" { assert ($1 == 843); $$ = $1; }
+| n844 "844" { assert ($1 == 844); $$ = $1; }
+| n845 "845" { assert ($1 == 845); $$ = $1; }
+| n846 "846" { assert ($1 == 846); $$ = $1; }
+| n847 "847" { assert ($1 == 847); $$ = $1; }
+| n848 "848" { assert ($1 == 848); $$ = $1; }
+| n849 "849" { assert ($1 == 849); $$ = $1; }
+| n850 "850" { assert ($1 == 850); $$ = $1; }
+| n851 "851" { assert ($1 == 851); $$ = $1; }
+| n852 "852" { assert ($1 == 852); $$ = $1; }
+| n853 "853" { assert ($1 == 853); $$ = $1; }
+| n854 "854" { assert ($1 == 854); $$ = $1; }
+| n855 "855" { assert ($1 == 855); $$ = $1; }
+| n856 "856" { assert ($1 == 856); $$ = $1; }
+| n857 "857" { assert ($1 == 857); $$ = $1; }
+| n858 "858" { assert ($1 == 858); $$ = $1; }
+| n859 "859" { assert ($1 == 859); $$ = $1; }
+| n860 "860" { assert ($1 == 860); $$ = $1; }
+| n861 "861" { assert ($1 == 861); $$ = $1; }
+| n862 "862" { assert ($1 == 862); $$ = $1; }
+| n863 "863" { assert ($1 == 863); $$ = $1; }
+| n864 "864" { assert ($1 == 864); $$ = $1; }
+| n865 "865" { assert ($1 == 865); $$ = $1; }
+| n866 "866" { assert ($1 == 866); $$ = $1; }
+| n867 "867" { assert ($1 == 867); $$ = $1; }
+| n868 "868" { assert ($1 == 868); $$ = $1; }
+| n869 "869" { assert ($1 == 869); $$ = $1; }
+| n870 "870" { assert ($1 == 870); $$ = $1; }
+| n871 "871" { assert ($1 == 871); $$ = $1; }
+| n872 "872" { assert ($1 == 872); $$ = $1; }
+| n873 "873" { assert ($1 == 873); $$ = $1; }
+| n874 "874" { assert ($1 == 874); $$ = $1; }
+| n875 "875" { assert ($1 == 875); $$ = $1; }
+| n876 "876" { assert ($1 == 876); $$ = $1; }
+| n877 "877" { assert ($1 == 877); $$ = $1; }
+| n878 "878" { assert ($1 == 878); $$ = $1; }
+| n879 "879" { assert ($1 == 879); $$ = $1; }
+| n880 "880" { assert ($1 == 880); $$ = $1; }
+| n881 "881" { assert ($1 == 881); $$ = $1; }
+| n882 "882" { assert ($1 == 882); $$ = $1; }
+| n883 "883" { assert ($1 == 883); $$ = $1; }
+| n884 "884" { assert ($1 == 884); $$ = $1; }
+| n885 "885" { assert ($1 == 885); $$ = $1; }
+| n886 "886" { assert ($1 == 886); $$ = $1; }
+| n887 "887" { assert ($1 == 887); $$ = $1; }
+| n888 "888" { assert ($1 == 888); $$ = $1; }
+| n889 "889" { assert ($1 == 889); $$ = $1; }
+| n890 "890" { assert ($1 == 890); $$ = $1; }
+| n891 "891" { assert ($1 == 891); $$ = $1; }
+| n892 "892" { assert ($1 == 892); $$ = $1; }
+| n893 "893" { assert ($1 == 893); $$ = $1; }
+| n894 "894" { assert ($1 == 894); $$ = $1; }
+| n895 "895" { assert ($1 == 895); $$ = $1; }
+| n896 "896" { assert ($1 == 896); $$ = $1; }
+| n897 "897" { assert ($1 == 897); $$ = $1; }
+| n898 "898" { assert ($1 == 898); $$ = $1; }
+| n899 "899" { assert ($1 == 899); $$ = $1; }
+| n900 "900" { assert ($1 == 900); $$ = $1; }
+| n901 "901" { assert ($1 == 901); $$ = $1; }
+| n902 "902" { assert ($1 == 902); $$ = $1; }
+| n903 "903" { assert ($1 == 903); $$ = $1; }
+| n904 "904" { assert ($1 == 904); $$ = $1; }
+| n905 "905" { assert ($1 == 905); $$ = $1; }
+| n906 "906" { assert ($1 == 906); $$ = $1; }
+| n907 "907" { assert ($1 == 907); $$ = $1; }
+| n908 "908" { assert ($1 == 908); $$ = $1; }
+| n909 "909" { assert ($1 == 909); $$ = $1; }
+| n910 "910" { assert ($1 == 910); $$ = $1; }
+| n911 "911" { assert ($1 == 911); $$ = $1; }
+| n912 "912" { assert ($1 == 912); $$ = $1; }
+| n913 "913" { assert ($1 == 913); $$ = $1; }
+| n914 "914" { assert ($1 == 914); $$ = $1; }
+| n915 "915" { assert ($1 == 915); $$ = $1; }
+| n916 "916" { assert ($1 == 916); $$ = $1; }
+| n917 "917" { assert ($1 == 917); $$ = $1; }
+| n918 "918" { assert ($1 == 918); $$ = $1; }
+| n919 "919" { assert ($1 == 919); $$ = $1; }
+| n920 "920" { assert ($1 == 920); $$ = $1; }
+| n921 "921" { assert ($1 == 921); $$ = $1; }
+| n922 "922" { assert ($1 == 922); $$ = $1; }
+| n923 "923" { assert ($1 == 923); $$ = $1; }
+| n924 "924" { assert ($1 == 924); $$ = $1; }
+| n925 "925" { assert ($1 == 925); $$ = $1; }
+| n926 "926" { assert ($1 == 926); $$ = $1; }
+| n927 "927" { assert ($1 == 927); $$ = $1; }
+| n928 "928" { assert ($1 == 928); $$ = $1; }
+| n929 "929" { assert ($1 == 929); $$ = $1; }
+| n930 "930" { assert ($1 == 930); $$ = $1; }
+| n931 "931" { assert ($1 == 931); $$ = $1; }
+| n932 "932" { assert ($1 == 932); $$ = $1; }
+| n933 "933" { assert ($1 == 933); $$ = $1; }
+| n934 "934" { assert ($1 == 934); $$ = $1; }
+| n935 "935" { assert ($1 == 935); $$ = $1; }
+| n936 "936" { assert ($1 == 936); $$ = $1; }
+| n937 "937" { assert ($1 == 937); $$ = $1; }
+| n938 "938" { assert ($1 == 938); $$ = $1; }
+| n939 "939" { assert ($1 == 939); $$ = $1; }
+| n940 "940" { assert ($1 == 940); $$ = $1; }
+| n941 "941" { assert ($1 == 941); $$ = $1; }
+| n942 "942" { assert ($1 == 942); $$ = $1; }
+| n943 "943" { assert ($1 == 943); $$ = $1; }
+| n944 "944" { assert ($1 == 944); $$ = $1; }
+| n945 "945" { assert ($1 == 945); $$ = $1; }
+| n946 "946" { assert ($1 == 946); $$ = $1; }
+| n947 "947" { assert ($1 == 947); $$ = $1; }
+| n948 "948" { assert ($1 == 948); $$ = $1; }
+| n949 "949" { assert ($1 == 949); $$ = $1; }
+| n950 "950" { assert ($1 == 950); $$ = $1; }
+| n951 "951" { assert ($1 == 951); $$ = $1; }
+| n952 "952" { assert ($1 == 952); $$ = $1; }
+| n953 "953" { assert ($1 == 953); $$ = $1; }
+| n954 "954" { assert ($1 == 954); $$ = $1; }
+| n955 "955" { assert ($1 == 955); $$ = $1; }
+| n956 "956" { assert ($1 == 956); $$ = $1; }
+| n957 "957" { assert ($1 == 957); $$ = $1; }
+| n958 "958" { assert ($1 == 958); $$ = $1; }
+| n959 "959" { assert ($1 == 959); $$ = $1; }
+| n960 "960" { assert ($1 == 960); $$ = $1; }
+| n961 "961" { assert ($1 == 961); $$ = $1; }
+| n962 "962" { assert ($1 == 962); $$ = $1; }
+| n963 "963" { assert ($1 == 963); $$ = $1; }
+| n964 "964" { assert ($1 == 964); $$ = $1; }
+| n965 "965" { assert ($1 == 965); $$ = $1; }
+| n966 "966" { assert ($1 == 966); $$ = $1; }
+| n967 "967" { assert ($1 == 967); $$ = $1; }
+| n968 "968" { assert ($1 == 968); $$ = $1; }
+| n969 "969" { assert ($1 == 969); $$ = $1; }
+| n970 "970" { assert ($1 == 970); $$ = $1; }
+| n971 "971" { assert ($1 == 971); $$ = $1; }
+| n972 "972" { assert ($1 == 972); $$ = $1; }
+| n973 "973" { assert ($1 == 973); $$ = $1; }
+| n974 "974" { assert ($1 == 974); $$ = $1; }
+| n975 "975" { assert ($1 == 975); $$ = $1; }
+| n976 "976" { assert ($1 == 976); $$ = $1; }
+| n977 "977" { assert ($1 == 977); $$ = $1; }
+| n978 "978" { assert ($1 == 978); $$ = $1; }
+| n979 "979" { assert ($1 == 979); $$ = $1; }
+| n980 "980" { assert ($1 == 980); $$ = $1; }
+| n981 "981" { assert ($1 == 981); $$ = $1; }
+| n982 "982" { assert ($1 == 982); $$ = $1; }
+| n983 "983" { assert ($1 == 983); $$ = $1; }
+| n984 "984" { assert ($1 == 984); $$ = $1; }
+| n985 "985" { assert ($1 == 985); $$ = $1; }
+| n986 "986" { assert ($1 == 986); $$ = $1; }
+| n987 "987" { assert ($1 == 987); $$ = $1; }
+| n988 "988" { assert ($1 == 988); $$ = $1; }
+| n989 "989" { assert ($1 == 989); $$ = $1; }
+| n990 "990" { assert ($1 == 990); $$ = $1; }
+| n991 "991" { assert ($1 == 991); $$ = $1; }
+| n992 "992" { assert ($1 == 992); $$ = $1; }
+| n993 "993" { assert ($1 == 993); $$ = $1; }
+| n994 "994" { assert ($1 == 994); $$ = $1; }
+| n995 "995" { assert ($1 == 995); $$ = $1; }
+| n996 "996" { assert ($1 == 996); $$ = $1; }
+| n997 "997" { assert ($1 == 997); $$ = $1; }
+| n998 "998" { assert ($1 == 998); $$ = $1; }
+| n999 "999" { assert ($1 == 999); $$ = $1; }
+| n1000 "1000" { assert ($1 == 1000); $$ = $1; }
+;
+n1: token { $$ = 1; };
+n2: token { $$ = 2; };
+n3: token { $$ = 3; };
+n4: token { $$ = 4; };
+n5: token { $$ = 5; };
+n6: token { $$ = 6; };
+n7: token { $$ = 7; };
+n8: token { $$ = 8; };
+n9: token { $$ = 9; };
+n10: token { $$ = 10; };
+n11: token { $$ = 11; };
+n12: token { $$ = 12; };
+n13: token { $$ = 13; };
+n14: token { $$ = 14; };
+n15: token { $$ = 15; };
+n16: token { $$ = 16; };
+n17: token { $$ = 17; };
+n18: token { $$ = 18; };
+n19: token { $$ = 19; };
+n20: token { $$ = 20; };
+n21: token { $$ = 21; };
+n22: token { $$ = 22; };
+n23: token { $$ = 23; };
+n24: token { $$ = 24; };
+n25: token { $$ = 25; };
+n26: token { $$ = 26; };
+n27: token { $$ = 27; };
+n28: token { $$ = 28; };
+n29: token { $$ = 29; };
+n30: token { $$ = 30; };
+n31: token { $$ = 31; };
+n32: token { $$ = 32; };
+n33: token { $$ = 33; };
+n34: token { $$ = 34; };
+n35: token { $$ = 35; };
+n36: token { $$ = 36; };
+n37: token { $$ = 37; };
+n38: token { $$ = 38; };
+n39: token { $$ = 39; };
+n40: token { $$ = 40; };
+n41: token { $$ = 41; };
+n42: token { $$ = 42; };
+n43: token { $$ = 43; };
+n44: token { $$ = 44; };
+n45: token { $$ = 45; };
+n46: token { $$ = 46; };
+n47: token { $$ = 47; };
+n48: token { $$ = 48; };
+n49: token { $$ = 49; };
+n50: token { $$ = 50; };
+n51: token { $$ = 51; };
+n52: token { $$ = 52; };
+n53: token { $$ = 53; };
+n54: token { $$ = 54; };
+n55: token { $$ = 55; };
+n56: token { $$ = 56; };
+n57: token { $$ = 57; };
+n58: token { $$ = 58; };
+n59: token { $$ = 59; };
+n60: token { $$ = 60; };
+n61: token { $$ = 61; };
+n62: token { $$ = 62; };
+n63: token { $$ = 63; };
+n64: token { $$ = 64; };
+n65: token { $$ = 65; };
+n66: token { $$ = 66; };
+n67: token { $$ = 67; };
+n68: token { $$ = 68; };
+n69: token { $$ = 69; };
+n70: token { $$ = 70; };
+n71: token { $$ = 71; };
+n72: token { $$ = 72; };
+n73: token { $$ = 73; };
+n74: token { $$ = 74; };
+n75: token { $$ = 75; };
+n76: token { $$ = 76; };
+n77: token { $$ = 77; };
+n78: token { $$ = 78; };
+n79: token { $$ = 79; };
+n80: token { $$ = 80; };
+n81: token { $$ = 81; };
+n82: token { $$ = 82; };
+n83: token { $$ = 83; };
+n84: token { $$ = 84; };
+n85: token { $$ = 85; };
+n86: token { $$ = 86; };
+n87: token { $$ = 87; };
+n88: token { $$ = 88; };
+n89: token { $$ = 89; };
+n90: token { $$ = 90; };
+n91: token { $$ = 91; };
+n92: token { $$ = 92; };
+n93: token { $$ = 93; };
+n94: token { $$ = 94; };
+n95: token { $$ = 95; };
+n96: token { $$ = 96; };
+n97: token { $$ = 97; };
+n98: token { $$ = 98; };
+n99: token { $$ = 99; };
+n100: token { $$ = 100; };
+n101: token { $$ = 101; };
+n102: token { $$ = 102; };
+n103: token { $$ = 103; };
+n104: token { $$ = 104; };
+n105: token { $$ = 105; };
+n106: token { $$ = 106; };
+n107: token { $$ = 107; };
+n108: token { $$ = 108; };
+n109: token { $$ = 109; };
+n110: token { $$ = 110; };
+n111: token { $$ = 111; };
+n112: token { $$ = 112; };
+n113: token { $$ = 113; };
+n114: token { $$ = 114; };
+n115: token { $$ = 115; };
+n116: token { $$ = 116; };
+n117: token { $$ = 117; };
+n118: token { $$ = 118; };
+n119: token { $$ = 119; };
+n120: token { $$ = 120; };
+n121: token { $$ = 121; };
+n122: token { $$ = 122; };
+n123: token { $$ = 123; };
+n124: token { $$ = 124; };
+n125: token { $$ = 125; };
+n126: token { $$ = 126; };
+n127: token { $$ = 127; };
+n128: token { $$ = 128; };
+n129: token { $$ = 129; };
+n130: token { $$ = 130; };
+n131: token { $$ = 131; };
+n132: token { $$ = 132; };
+n133: token { $$ = 133; };
+n134: token { $$ = 134; };
+n135: token { $$ = 135; };
+n136: token { $$ = 136; };
+n137: token { $$ = 137; };
+n138: token { $$ = 138; };
+n139: token { $$ = 139; };
+n140: token { $$ = 140; };
+n141: token { $$ = 141; };
+n142: token { $$ = 142; };
+n143: token { $$ = 143; };
+n144: token { $$ = 144; };
+n145: token { $$ = 145; };
+n146: token { $$ = 146; };
+n147: token { $$ = 147; };
+n148: token { $$ = 148; };
+n149: token { $$ = 149; };
+n150: token { $$ = 150; };
+n151: token { $$ = 151; };
+n152: token { $$ = 152; };
+n153: token { $$ = 153; };
+n154: token { $$ = 154; };
+n155: token { $$ = 155; };
+n156: token { $$ = 156; };
+n157: token { $$ = 157; };
+n158: token { $$ = 158; };
+n159: token { $$ = 159; };
+n160: token { $$ = 160; };
+n161: token { $$ = 161; };
+n162: token { $$ = 162; };
+n163: token { $$ = 163; };
+n164: token { $$ = 164; };
+n165: token { $$ = 165; };
+n166: token { $$ = 166; };
+n167: token { $$ = 167; };
+n168: token { $$ = 168; };
+n169: token { $$ = 169; };
+n170: token { $$ = 170; };
+n171: token { $$ = 171; };
+n172: token { $$ = 172; };
+n173: token { $$ = 173; };
+n174: token { $$ = 174; };
+n175: token { $$ = 175; };
+n176: token { $$ = 176; };
+n177: token { $$ = 177; };
+n178: token { $$ = 178; };
+n179: token { $$ = 179; };
+n180: token { $$ = 180; };
+n181: token { $$ = 181; };
+n182: token { $$ = 182; };
+n183: token { $$ = 183; };
+n184: token { $$ = 184; };
+n185: token { $$ = 185; };
+n186: token { $$ = 186; };
+n187: token { $$ = 187; };
+n188: token { $$ = 188; };
+n189: token { $$ = 189; };
+n190: token { $$ = 190; };
+n191: token { $$ = 191; };
+n192: token { $$ = 192; };
+n193: token { $$ = 193; };
+n194: token { $$ = 194; };
+n195: token { $$ = 195; };
+n196: token { $$ = 196; };
+n197: token { $$ = 197; };
+n198: token { $$ = 198; };
+n199: token { $$ = 199; };
+n200: token { $$ = 200; };
+n201: token { $$ = 201; };
+n202: token { $$ = 202; };
+n203: token { $$ = 203; };
+n204: token { $$ = 204; };
+n205: token { $$ = 205; };
+n206: token { $$ = 206; };
+n207: token { $$ = 207; };
+n208: token { $$ = 208; };
+n209: token { $$ = 209; };
+n210: token { $$ = 210; };
+n211: token { $$ = 211; };
+n212: token { $$ = 212; };
+n213: token { $$ = 213; };
+n214: token { $$ = 214; };
+n215: token { $$ = 215; };
+n216: token { $$ = 216; };
+n217: token { $$ = 217; };
+n218: token { $$ = 218; };
+n219: token { $$ = 219; };
+n220: token { $$ = 220; };
+n221: token { $$ = 221; };
+n222: token { $$ = 222; };
+n223: token { $$ = 223; };
+n224: token { $$ = 224; };
+n225: token { $$ = 225; };
+n226: token { $$ = 226; };
+n227: token { $$ = 227; };
+n228: token { $$ = 228; };
+n229: token { $$ = 229; };
+n230: token { $$ = 230; };
+n231: token { $$ = 231; };
+n232: token { $$ = 232; };
+n233: token { $$ = 233; };
+n234: token { $$ = 234; };
+n235: token { $$ = 235; };
+n236: token { $$ = 236; };
+n237: token { $$ = 237; };
+n238: token { $$ = 238; };
+n239: token { $$ = 239; };
+n240: token { $$ = 240; };
+n241: token { $$ = 241; };
+n242: token { $$ = 242; };
+n243: token { $$ = 243; };
+n244: token { $$ = 244; };
+n245: token { $$ = 245; };
+n246: token { $$ = 246; };
+n247: token { $$ = 247; };
+n248: token { $$ = 248; };
+n249: token { $$ = 249; };
+n250: token { $$ = 250; };
+n251: token { $$ = 251; };
+n252: token { $$ = 252; };
+n253: token { $$ = 253; };
+n254: token { $$ = 254; };
+n255: token { $$ = 255; };
+n256: token { $$ = 256; };
+n257: token { $$ = 257; };
+n258: token { $$ = 258; };
+n259: token { $$ = 259; };
+n260: token { $$ = 260; };
+n261: token { $$ = 261; };
+n262: token { $$ = 262; };
+n263: token { $$ = 263; };
+n264: token { $$ = 264; };
+n265: token { $$ = 265; };
+n266: token { $$ = 266; };
+n267: token { $$ = 267; };
+n268: token { $$ = 268; };
+n269: token { $$ = 269; };
+n270: token { $$ = 270; };
+n271: token { $$ = 271; };
+n272: token { $$ = 272; };
+n273: token { $$ = 273; };
+n274: token { $$ = 274; };
+n275: token { $$ = 275; };
+n276: token { $$ = 276; };
+n277: token { $$ = 277; };
+n278: token { $$ = 278; };
+n279: token { $$ = 279; };
+n280: token { $$ = 280; };
+n281: token { $$ = 281; };
+n282: token { $$ = 282; };
+n283: token { $$ = 283; };
+n284: token { $$ = 284; };
+n285: token { $$ = 285; };
+n286: token { $$ = 286; };
+n287: token { $$ = 287; };
+n288: token { $$ = 288; };
+n289: token { $$ = 289; };
+n290: token { $$ = 290; };
+n291: token { $$ = 291; };
+n292: token { $$ = 292; };
+n293: token { $$ = 293; };
+n294: token { $$ = 294; };
+n295: token { $$ = 295; };
+n296: token { $$ = 296; };
+n297: token { $$ = 297; };
+n298: token { $$ = 298; };
+n299: token { $$ = 299; };
+n300: token { $$ = 300; };
+n301: token { $$ = 301; };
+n302: token { $$ = 302; };
+n303: token { $$ = 303; };
+n304: token { $$ = 304; };
+n305: token { $$ = 305; };
+n306: token { $$ = 306; };
+n307: token { $$ = 307; };
+n308: token { $$ = 308; };
+n309: token { $$ = 309; };
+n310: token { $$ = 310; };
+n311: token { $$ = 311; };
+n312: token { $$ = 312; };
+n313: token { $$ = 313; };
+n314: token { $$ = 314; };
+n315: token { $$ = 315; };
+n316: token { $$ = 316; };
+n317: token { $$ = 317; };
+n318: token { $$ = 318; };
+n319: token { $$ = 319; };
+n320: token { $$ = 320; };
+n321: token { $$ = 321; };
+n322: token { $$ = 322; };
+n323: token { $$ = 323; };
+n324: token { $$ = 324; };
+n325: token { $$ = 325; };
+n326: token { $$ = 326; };
+n327: token { $$ = 327; };
+n328: token { $$ = 328; };
+n329: token { $$ = 329; };
+n330: token { $$ = 330; };
+n331: token { $$ = 331; };
+n332: token { $$ = 332; };
+n333: token { $$ = 333; };
+n334: token { $$ = 334; };
+n335: token { $$ = 335; };
+n336: token { $$ = 336; };
+n337: token { $$ = 337; };
+n338: token { $$ = 338; };
+n339: token { $$ = 339; };
+n340: token { $$ = 340; };
+n341: token { $$ = 341; };
+n342: token { $$ = 342; };
+n343: token { $$ = 343; };
+n344: token { $$ = 344; };
+n345: token { $$ = 345; };
+n346: token { $$ = 346; };
+n347: token { $$ = 347; };
+n348: token { $$ = 348; };
+n349: token { $$ = 349; };
+n350: token { $$ = 350; };
+n351: token { $$ = 351; };
+n352: token { $$ = 352; };
+n353: token { $$ = 353; };
+n354: token { $$ = 354; };
+n355: token { $$ = 355; };
+n356: token { $$ = 356; };
+n357: token { $$ = 357; };
+n358: token { $$ = 358; };
+n359: token { $$ = 359; };
+n360: token { $$ = 360; };
+n361: token { $$ = 361; };
+n362: token { $$ = 362; };
+n363: token { $$ = 363; };
+n364: token { $$ = 364; };
+n365: token { $$ = 365; };
+n366: token { $$ = 366; };
+n367: token { $$ = 367; };
+n368: token { $$ = 368; };
+n369: token { $$ = 369; };
+n370: token { $$ = 370; };
+n371: token { $$ = 371; };
+n372: token { $$ = 372; };
+n373: token { $$ = 373; };
+n374: token { $$ = 374; };
+n375: token { $$ = 375; };
+n376: token { $$ = 376; };
+n377: token { $$ = 377; };
+n378: token { $$ = 378; };
+n379: token { $$ = 379; };
+n380: token { $$ = 380; };
+n381: token { $$ = 381; };
+n382: token { $$ = 382; };
+n383: token { $$ = 383; };
+n384: token { $$ = 384; };
+n385: token { $$ = 385; };
+n386: token { $$ = 386; };
+n387: token { $$ = 387; };
+n388: token { $$ = 388; };
+n389: token { $$ = 389; };
+n390: token { $$ = 390; };
+n391: token { $$ = 391; };
+n392: token { $$ = 392; };
+n393: token { $$ = 393; };
+n394: token { $$ = 394; };
+n395: token { $$ = 395; };
+n396: token { $$ = 396; };
+n397: token { $$ = 397; };
+n398: token { $$ = 398; };
+n399: token { $$ = 399; };
+n400: token { $$ = 400; };
+n401: token { $$ = 401; };
+n402: token { $$ = 402; };
+n403: token { $$ = 403; };
+n404: token { $$ = 404; };
+n405: token { $$ = 405; };
+n406: token { $$ = 406; };
+n407: token { $$ = 407; };
+n408: token { $$ = 408; };
+n409: token { $$ = 409; };
+n410: token { $$ = 410; };
+n411: token { $$ = 411; };
+n412: token { $$ = 412; };
+n413: token { $$ = 413; };
+n414: token { $$ = 414; };
+n415: token { $$ = 415; };
+n416: token { $$ = 416; };
+n417: token { $$ = 417; };
+n418: token { $$ = 418; };
+n419: token { $$ = 419; };
+n420: token { $$ = 420; };
+n421: token { $$ = 421; };
+n422: token { $$ = 422; };
+n423: token { $$ = 423; };
+n424: token { $$ = 424; };
+n425: token { $$ = 425; };
+n426: token { $$ = 426; };
+n427: token { $$ = 427; };
+n428: token { $$ = 428; };
+n429: token { $$ = 429; };
+n430: token { $$ = 430; };
+n431: token { $$ = 431; };
+n432: token { $$ = 432; };
+n433: token { $$ = 433; };
+n434: token { $$ = 434; };
+n435: token { $$ = 435; };
+n436: token { $$ = 436; };
+n437: token { $$ = 437; };
+n438: token { $$ = 438; };
+n439: token { $$ = 439; };
+n440: token { $$ = 440; };
+n441: token { $$ = 441; };
+n442: token { $$ = 442; };
+n443: token { $$ = 443; };
+n444: token { $$ = 444; };
+n445: token { $$ = 445; };
+n446: token { $$ = 446; };
+n447: token { $$ = 447; };
+n448: token { $$ = 448; };
+n449: token { $$ = 449; };
+n450: token { $$ = 450; };
+n451: token { $$ = 451; };
+n452: token { $$ = 452; };
+n453: token { $$ = 453; };
+n454: token { $$ = 454; };
+n455: token { $$ = 455; };
+n456: token { $$ = 456; };
+n457: token { $$ = 457; };
+n458: token { $$ = 458; };
+n459: token { $$ = 459; };
+n460: token { $$ = 460; };
+n461: token { $$ = 461; };
+n462: token { $$ = 462; };
+n463: token { $$ = 463; };
+n464: token { $$ = 464; };
+n465: token { $$ = 465; };
+n466: token { $$ = 466; };
+n467: token { $$ = 467; };
+n468: token { $$ = 468; };
+n469: token { $$ = 469; };
+n470: token { $$ = 470; };
+n471: token { $$ = 471; };
+n472: token { $$ = 472; };
+n473: token { $$ = 473; };
+n474: token { $$ = 474; };
+n475: token { $$ = 475; };
+n476: token { $$ = 476; };
+n477: token { $$ = 477; };
+n478: token { $$ = 478; };
+n479: token { $$ = 479; };
+n480: token { $$ = 480; };
+n481: token { $$ = 481; };
+n482: token { $$ = 482; };
+n483: token { $$ = 483; };
+n484: token { $$ = 484; };
+n485: token { $$ = 485; };
+n486: token { $$ = 486; };
+n487: token { $$ = 487; };
+n488: token { $$ = 488; };
+n489: token { $$ = 489; };
+n490: token { $$ = 490; };
+n491: token { $$ = 491; };
+n492: token { $$ = 492; };
+n493: token { $$ = 493; };
+n494: token { $$ = 494; };
+n495: token { $$ = 495; };
+n496: token { $$ = 496; };
+n497: token { $$ = 497; };
+n498: token { $$ = 498; };
+n499: token { $$ = 499; };
+n500: token { $$ = 500; };
+n501: token { $$ = 501; };
+n502: token { $$ = 502; };
+n503: token { $$ = 503; };
+n504: token { $$ = 504; };
+n505: token { $$ = 505; };
+n506: token { $$ = 506; };
+n507: token { $$ = 507; };
+n508: token { $$ = 508; };
+n509: token { $$ = 509; };
+n510: token { $$ = 510; };
+n511: token { $$ = 511; };
+n512: token { $$ = 512; };
+n513: token { $$ = 513; };
+n514: token { $$ = 514; };
+n515: token { $$ = 515; };
+n516: token { $$ = 516; };
+n517: token { $$ = 517; };
+n518: token { $$ = 518; };
+n519: token { $$ = 519; };
+n520: token { $$ = 520; };
+n521: token { $$ = 521; };
+n522: token { $$ = 522; };
+n523: token { $$ = 523; };
+n524: token { $$ = 524; };
+n525: token { $$ = 525; };
+n526: token { $$ = 526; };
+n527: token { $$ = 527; };
+n528: token { $$ = 528; };
+n529: token { $$ = 529; };
+n530: token { $$ = 530; };
+n531: token { $$ = 531; };
+n532: token { $$ = 532; };
+n533: token { $$ = 533; };
+n534: token { $$ = 534; };
+n535: token { $$ = 535; };
+n536: token { $$ = 536; };
+n537: token { $$ = 537; };
+n538: token { $$ = 538; };
+n539: token { $$ = 539; };
+n540: token { $$ = 540; };
+n541: token { $$ = 541; };
+n542: token { $$ = 542; };
+n543: token { $$ = 543; };
+n544: token { $$ = 544; };
+n545: token { $$ = 545; };
+n546: token { $$ = 546; };
+n547: token { $$ = 547; };
+n548: token { $$ = 548; };
+n549: token { $$ = 549; };
+n550: token { $$ = 550; };
+n551: token { $$ = 551; };
+n552: token { $$ = 552; };
+n553: token { $$ = 553; };
+n554: token { $$ = 554; };
+n555: token { $$ = 555; };
+n556: token { $$ = 556; };
+n557: token { $$ = 557; };
+n558: token { $$ = 558; };
+n559: token { $$ = 559; };
+n560: token { $$ = 560; };
+n561: token { $$ = 561; };
+n562: token { $$ = 562; };
+n563: token { $$ = 563; };
+n564: token { $$ = 564; };
+n565: token { $$ = 565; };
+n566: token { $$ = 566; };
+n567: token { $$ = 567; };
+n568: token { $$ = 568; };
+n569: token { $$ = 569; };
+n570: token { $$ = 570; };
+n571: token { $$ = 571; };
+n572: token { $$ = 572; };
+n573: token { $$ = 573; };
+n574: token { $$ = 574; };
+n575: token { $$ = 575; };
+n576: token { $$ = 576; };
+n577: token { $$ = 577; };
+n578: token { $$ = 578; };
+n579: token { $$ = 579; };
+n580: token { $$ = 580; };
+n581: token { $$ = 581; };
+n582: token { $$ = 582; };
+n583: token { $$ = 583; };
+n584: token { $$ = 584; };
+n585: token { $$ = 585; };
+n586: token { $$ = 586; };
+n587: token { $$ = 587; };
+n588: token { $$ = 588; };
+n589: token { $$ = 589; };
+n590: token { $$ = 590; };
+n591: token { $$ = 591; };
+n592: token { $$ = 592; };
+n593: token { $$ = 593; };
+n594: token { $$ = 594; };
+n595: token { $$ = 595; };
+n596: token { $$ = 596; };
+n597: token { $$ = 597; };
+n598: token { $$ = 598; };
+n599: token { $$ = 599; };
+n600: token { $$ = 600; };
+n601: token { $$ = 601; };
+n602: token { $$ = 602; };
+n603: token { $$ = 603; };
+n604: token { $$ = 604; };
+n605: token { $$ = 605; };
+n606: token { $$ = 606; };
+n607: token { $$ = 607; };
+n608: token { $$ = 608; };
+n609: token { $$ = 609; };
+n610: token { $$ = 610; };
+n611: token { $$ = 611; };
+n612: token { $$ = 612; };
+n613: token { $$ = 613; };
+n614: token { $$ = 614; };
+n615: token { $$ = 615; };
+n616: token { $$ = 616; };
+n617: token { $$ = 617; };
+n618: token { $$ = 618; };
+n619: token { $$ = 619; };
+n620: token { $$ = 620; };
+n621: token { $$ = 621; };
+n622: token { $$ = 622; };
+n623: token { $$ = 623; };
+n624: token { $$ = 624; };
+n625: token { $$ = 625; };
+n626: token { $$ = 626; };
+n627: token { $$ = 627; };
+n628: token { $$ = 628; };
+n629: token { $$ = 629; };
+n630: token { $$ = 630; };
+n631: token { $$ = 631; };
+n632: token { $$ = 632; };
+n633: token { $$ = 633; };
+n634: token { $$ = 634; };
+n635: token { $$ = 635; };
+n636: token { $$ = 636; };
+n637: token { $$ = 637; };
+n638: token { $$ = 638; };
+n639: token { $$ = 639; };
+n640: token { $$ = 640; };
+n641: token { $$ = 641; };
+n642: token { $$ = 642; };
+n643: token { $$ = 643; };
+n644: token { $$ = 644; };
+n645: token { $$ = 645; };
+n646: token { $$ = 646; };
+n647: token { $$ = 647; };
+n648: token { $$ = 648; };
+n649: token { $$ = 649; };
+n650: token { $$ = 650; };
+n651: token { $$ = 651; };
+n652: token { $$ = 652; };
+n653: token { $$ = 653; };
+n654: token { $$ = 654; };
+n655: token { $$ = 655; };
+n656: token { $$ = 656; };
+n657: token { $$ = 657; };
+n658: token { $$ = 658; };
+n659: token { $$ = 659; };
+n660: token { $$ = 660; };
+n661: token { $$ = 661; };
+n662: token { $$ = 662; };
+n663: token { $$ = 663; };
+n664: token { $$ = 664; };
+n665: token { $$ = 665; };
+n666: token { $$ = 666; };
+n667: token { $$ = 667; };
+n668: token { $$ = 668; };
+n669: token { $$ = 669; };
+n670: token { $$ = 670; };
+n671: token { $$ = 671; };
+n672: token { $$ = 672; };
+n673: token { $$ = 673; };
+n674: token { $$ = 674; };
+n675: token { $$ = 675; };
+n676: token { $$ = 676; };
+n677: token { $$ = 677; };
+n678: token { $$ = 678; };
+n679: token { $$ = 679; };
+n680: token { $$ = 680; };
+n681: token { $$ = 681; };
+n682: token { $$ = 682; };
+n683: token { $$ = 683; };
+n684: token { $$ = 684; };
+n685: token { $$ = 685; };
+n686: token { $$ = 686; };
+n687: token { $$ = 687; };
+n688: token { $$ = 688; };
+n689: token { $$ = 689; };
+n690: token { $$ = 690; };
+n691: token { $$ = 691; };
+n692: token { $$ = 692; };
+n693: token { $$ = 693; };
+n694: token { $$ = 694; };
+n695: token { $$ = 695; };
+n696: token { $$ = 696; };
+n697: token { $$ = 697; };
+n698: token { $$ = 698; };
+n699: token { $$ = 699; };
+n700: token { $$ = 700; };
+n701: token { $$ = 701; };
+n702: token { $$ = 702; };
+n703: token { $$ = 703; };
+n704: token { $$ = 704; };
+n705: token { $$ = 705; };
+n706: token { $$ = 706; };
+n707: token { $$ = 707; };
+n708: token { $$ = 708; };
+n709: token { $$ = 709; };
+n710: token { $$ = 710; };
+n711: token { $$ = 711; };
+n712: token { $$ = 712; };
+n713: token { $$ = 713; };
+n714: token { $$ = 714; };
+n715: token { $$ = 715; };
+n716: token { $$ = 716; };
+n717: token { $$ = 717; };
+n718: token { $$ = 718; };
+n719: token { $$ = 719; };
+n720: token { $$ = 720; };
+n721: token { $$ = 721; };
+n722: token { $$ = 722; };
+n723: token { $$ = 723; };
+n724: token { $$ = 724; };
+n725: token { $$ = 725; };
+n726: token { $$ = 726; };
+n727: token { $$ = 727; };
+n728: token { $$ = 728; };
+n729: token { $$ = 729; };
+n730: token { $$ = 730; };
+n731: token { $$ = 731; };
+n732: token { $$ = 732; };
+n733: token { $$ = 733; };
+n734: token { $$ = 734; };
+n735: token { $$ = 735; };
+n736: token { $$ = 736; };
+n737: token { $$ = 737; };
+n738: token { $$ = 738; };
+n739: token { $$ = 739; };
+n740: token { $$ = 740; };
+n741: token { $$ = 741; };
+n742: token { $$ = 742; };
+n743: token { $$ = 743; };
+n744: token { $$ = 744; };
+n745: token { $$ = 745; };
+n746: token { $$ = 746; };
+n747: token { $$ = 747; };
+n748: token { $$ = 748; };
+n749: token { $$ = 749; };
+n750: token { $$ = 750; };
+n751: token { $$ = 751; };
+n752: token { $$ = 752; };
+n753: token { $$ = 753; };
+n754: token { $$ = 754; };
+n755: token { $$ = 755; };
+n756: token { $$ = 756; };
+n757: token { $$ = 757; };
+n758: token { $$ = 758; };
+n759: token { $$ = 759; };
+n760: token { $$ = 760; };
+n761: token { $$ = 761; };
+n762: token { $$ = 762; };
+n763: token { $$ = 763; };
+n764: token { $$ = 764; };
+n765: token { $$ = 765; };
+n766: token { $$ = 766; };
+n767: token { $$ = 767; };
+n768: token { $$ = 768; };
+n769: token { $$ = 769; };
+n770: token { $$ = 770; };
+n771: token { $$ = 771; };
+n772: token { $$ = 772; };
+n773: token { $$ = 773; };
+n774: token { $$ = 774; };
+n775: token { $$ = 775; };
+n776: token { $$ = 776; };
+n777: token { $$ = 777; };
+n778: token { $$ = 778; };
+n779: token { $$ = 779; };
+n780: token { $$ = 780; };
+n781: token { $$ = 781; };
+n782: token { $$ = 782; };
+n783: token { $$ = 783; };
+n784: token { $$ = 784; };
+n785: token { $$ = 785; };
+n786: token { $$ = 786; };
+n787: token { $$ = 787; };
+n788: token { $$ = 788; };
+n789: token { $$ = 789; };
+n790: token { $$ = 790; };
+n791: token { $$ = 791; };
+n792: token { $$ = 792; };
+n793: token { $$ = 793; };
+n794: token { $$ = 794; };
+n795: token { $$ = 795; };
+n796: token { $$ = 796; };
+n797: token { $$ = 797; };
+n798: token { $$ = 798; };
+n799: token { $$ = 799; };
+n800: token { $$ = 800; };
+n801: token { $$ = 801; };
+n802: token { $$ = 802; };
+n803: token { $$ = 803; };
+n804: token { $$ = 804; };
+n805: token { $$ = 805; };
+n806: token { $$ = 806; };
+n807: token { $$ = 807; };
+n808: token { $$ = 808; };
+n809: token { $$ = 809; };
+n810: token { $$ = 810; };
+n811: token { $$ = 811; };
+n812: token { $$ = 812; };
+n813: token { $$ = 813; };
+n814: token { $$ = 814; };
+n815: token { $$ = 815; };
+n816: token { $$ = 816; };
+n817: token { $$ = 817; };
+n818: token { $$ = 818; };
+n819: token { $$ = 819; };
+n820: token { $$ = 820; };
+n821: token { $$ = 821; };
+n822: token { $$ = 822; };
+n823: token { $$ = 823; };
+n824: token { $$ = 824; };
+n825: token { $$ = 825; };
+n826: token { $$ = 826; };
+n827: token { $$ = 827; };
+n828: token { $$ = 828; };
+n829: token { $$ = 829; };
+n830: token { $$ = 830; };
+n831: token { $$ = 831; };
+n832: token { $$ = 832; };
+n833: token { $$ = 833; };
+n834: token { $$ = 834; };
+n835: token { $$ = 835; };
+n836: token { $$ = 836; };
+n837: token { $$ = 837; };
+n838: token { $$ = 838; };
+n839: token { $$ = 839; };
+n840: token { $$ = 840; };
+n841: token { $$ = 841; };
+n842: token { $$ = 842; };
+n843: token { $$ = 843; };
+n844: token { $$ = 844; };
+n845: token { $$ = 845; };
+n846: token { $$ = 846; };
+n847: token { $$ = 847; };
+n848: token { $$ = 848; };
+n849: token { $$ = 849; };
+n850: token { $$ = 850; };
+n851: token { $$ = 851; };
+n852: token { $$ = 852; };
+n853: token { $$ = 853; };
+n854: token { $$ = 854; };
+n855: token { $$ = 855; };
+n856: token { $$ = 856; };
+n857: token { $$ = 857; };
+n858: token { $$ = 858; };
+n859: token { $$ = 859; };
+n860: token { $$ = 860; };
+n861: token { $$ = 861; };
+n862: token { $$ = 862; };
+n863: token { $$ = 863; };
+n864: token { $$ = 864; };
+n865: token { $$ = 865; };
+n866: token { $$ = 866; };
+n867: token { $$ = 867; };
+n868: token { $$ = 868; };
+n869: token { $$ = 869; };
+n870: token { $$ = 870; };
+n871: token { $$ = 871; };
+n872: token { $$ = 872; };
+n873: token { $$ = 873; };
+n874: token { $$ = 874; };
+n875: token { $$ = 875; };
+n876: token { $$ = 876; };
+n877: token { $$ = 877; };
+n878: token { $$ = 878; };
+n879: token { $$ = 879; };
+n880: token { $$ = 880; };
+n881: token { $$ = 881; };
+n882: token { $$ = 882; };
+n883: token { $$ = 883; };
+n884: token { $$ = 884; };
+n885: token { $$ = 885; };
+n886: token { $$ = 886; };
+n887: token { $$ = 887; };
+n888: token { $$ = 888; };
+n889: token { $$ = 889; };
+n890: token { $$ = 890; };
+n891: token { $$ = 891; };
+n892: token { $$ = 892; };
+n893: token { $$ = 893; };
+n894: token { $$ = 894; };
+n895: token { $$ = 895; };
+n896: token { $$ = 896; };
+n897: token { $$ = 897; };
+n898: token { $$ = 898; };
+n899: token { $$ = 899; };
+n900: token { $$ = 900; };
+n901: token { $$ = 901; };
+n902: token { $$ = 902; };
+n903: token { $$ = 903; };
+n904: token { $$ = 904; };
+n905: token { $$ = 905; };
+n906: token { $$ = 906; };
+n907: token { $$ = 907; };
+n908: token { $$ = 908; };
+n909: token { $$ = 909; };
+n910: token { $$ = 910; };
+n911: token { $$ = 911; };
+n912: token { $$ = 912; };
+n913: token { $$ = 913; };
+n914: token { $$ = 914; };
+n915: token { $$ = 915; };
+n916: token { $$ = 916; };
+n917: token { $$ = 917; };
+n918: token { $$ = 918; };
+n919: token { $$ = 919; };
+n920: token { $$ = 920; };
+n921: token { $$ = 921; };
+n922: token { $$ = 922; };
+n923: token { $$ = 923; };
+n924: token { $$ = 924; };
+n925: token { $$ = 925; };
+n926: token { $$ = 926; };
+n927: token { $$ = 927; };
+n928: token { $$ = 928; };
+n929: token { $$ = 929; };
+n930: token { $$ = 930; };
+n931: token { $$ = 931; };
+n932: token { $$ = 932; };
+n933: token { $$ = 933; };
+n934: token { $$ = 934; };
+n935: token { $$ = 935; };
+n936: token { $$ = 936; };
+n937: token { $$ = 937; };
+n938: token { $$ = 938; };
+n939: token { $$ = 939; };
+n940: token { $$ = 940; };
+n941: token { $$ = 941; };
+n942: token { $$ = 942; };
+n943: token { $$ = 943; };
+n944: token { $$ = 944; };
+n945: token { $$ = 945; };
+n946: token { $$ = 946; };
+n947: token { $$ = 947; };
+n948: token { $$ = 948; };
+n949: token { $$ = 949; };
+n950: token { $$ = 950; };
+n951: token { $$ = 951; };
+n952: token { $$ = 952; };
+n953: token { $$ = 953; };
+n954: token { $$ = 954; };
+n955: token { $$ = 955; };
+n956: token { $$ = 956; };
+n957: token { $$ = 957; };
+n958: token { $$ = 958; };
+n959: token { $$ = 959; };
+n960: token { $$ = 960; };
+n961: token { $$ = 961; };
+n962: token { $$ = 962; };
+n963: token { $$ = 963; };
+n964: token { $$ = 964; };
+n965: token { $$ = 965; };
+n966: token { $$ = 966; };
+n967: token { $$ = 967; };
+n968: token { $$ = 968; };
+n969: token { $$ = 969; };
+n970: token { $$ = 970; };
+n971: token { $$ = 971; };
+n972: token { $$ = 972; };
+n973: token { $$ = 973; };
+n974: token { $$ = 974; };
+n975: token { $$ = 975; };
+n976: token { $$ = 976; };
+n977: token { $$ = 977; };
+n978: token { $$ = 978; };
+n979: token { $$ = 979; };
+n980: token { $$ = 980; };
+n981: token { $$ = 981; };
+n982: token { $$ = 982; };
+n983: token { $$ = 983; };
+n984: token { $$ = 984; };
+n985: token { $$ = 985; };
+n986: token { $$ = 986; };
+n987: token { $$ = 987; };
+n988: token { $$ = 988; };
+n989: token { $$ = 989; };
+n990: token { $$ = 990; };
+n991: token { $$ = 991; };
+n992: token { $$ = 992; };
+n993: token { $$ = 993; };
+n994: token { $$ = 994; };
+n995: token { $$ = 995; };
+n996: token { $$ = 996; };
+n997: token { $$ = 997; };
+n998: token { $$ = 998; };
+n999: token { $$ = 999; };
+n1000: token { $$ = 1000; };
+%%
+
+
+
+
+/* A C error reporting function.  */
+/* !POSIX */ static
+void yyerror (const char *msg)
+{
+  fprintf (stderr, "%s\n", msg);
+}
+static int
+yylex (void)
+{
+  static int return_token = 1;
+  static int counter = 1;
+  if (counter > MAX)
+    {
+      assert (counter++ == MAX + 1);
+      return 0;
+    }
+  if (return_token)
+    {
+      return_token = 0;
+      return token;
+    }
+  return_token = 1;
+  return counter++;
+}
+
+#include <stdlib.h> /* getenv. */
+#include <string.h> /* strcmp. */
+int
+main (int argc, char const* argv[])
+{
+  (void) argc;
+  (void) argv;
+  return yyparse ();
+}
 Starting parse
 Entering state 0
 Reading a token
@@ -235250,8 +233969,21 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token number (1.3: 2)
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
+./torture.at:494: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
+./calc.at:1491: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -235259,143 +233991,98 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
+Next token is token '!' (1.2: )
+Shifting token '!' (1.2: )
+Entering state 5
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token '!' (1.3: )
+Shifting token '!' (1.3: )
+Entering state 15
+Reducing stack 0 by rule 16 (line 107):
+   $1 = token '!' (1.2: )
+   $2 = token '!' (1.3: )
+Shifting token error (1.2-3: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Reading a token
+Next token is token ')' (1.4: )
+Shifting token ')' (1.4: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-3: )
+   $3 = token ')' (1.4: )
+-> $$ = nterm exp (1.1-4: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
+Next token is token '+' (1.6: )
+Shifting token '+' (1.6: )
+Entering state 20
 Reading a token
-Now at end of input.
-Shifting token "end of input" (2.1: )
-Entering state 16
-Cleanup: popping token "end of input" (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-Starting parse
-Entering state 0
+Next token is token '(' (1.8: )
+Shifting token '(' (1.8: )
+Entering state 4
 Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
+Next token is token "number" (1.9: 1)
+Shifting token "number" (1.9: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token number (1.3: 2)
-1.3: syntax error, unexpected number
-Error: popping nterm exp (1.1: 1)
-Cleanup: discarding lookahead token number (1.3: 2)
-./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
+   $1 = token "number" (1.9: 1)
+-> $$ = nterm exp (1.9: 1)
+Entering state 12
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
+Next token is token "number" (1.11: 2)
+1.11: syntax error, unexpected number
+Error: popping nterm exp (1.9: 1)
+Shifting token error (1.9-11: )
 Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token "number" (1.6: 1)
-Error: discarding token "number" (1.6: 1)
+Next token is token "number" (1.11: 2)
+Error: discarding token "number" (1.11: 2)
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.12: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.12: )
+Shifting token ')' (1.12: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
+   $1 = token '(' (1.8: )
+   $2 = token error (1.9-11: )
+   $3 = token ')' (1.12: )
+-> $$ = nterm exp (1.8-12: 1111)
+Entering state 29
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '=' (1.14: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-4: 1111)
+   $2 = token '+' (1.6: )
+   $3 = nterm exp (1.8-12: 1111)
+-> $$ = nterm exp (1.1-12: 2222)
+Entering state 8
+Next token is token '=' (1.14: )
+Shifting token '=' (1.14: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.11-14: 1111)
-Shifting token "number" (1.11-14: 1111)
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.17-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-12: 2222)
+   $2 = token '=' (1.14: )
+   $3 = nterm exp (1.16: 1)
+1.1-16: error: 2222 != 1
+-> $$ = nterm exp (1.1-16: 2222)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -235408,23 +234095,9 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stderr:
-stdout:
-./torture.at:238:  $PREPARSER ./input
-stderr:
-./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-605. torture.at:216:  ok
-./calc.at:1491: cat stderr
+input:
+  | 1//2
+./calc.at:1491:  $PREPARSER ./calc  input
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -235436,9 +234109,6 @@
   }eg
 ' expout || exit 77
 ./calc.at:1494: cat stderr
-input:
-  | 1//2
-./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
 Starting parse
 Entering state 0
@@ -235460,12 +234130,12 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
-input:
-
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-  | (1 + # + 1) = 1111
+input:
+  | (- *) + (1 2) = 1
 ./calc.at:1494:  $PREPARSER ./calc  input
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -235486,7 +234156,6 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
-stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -235494,70 +234163,103 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
+Next token is token ')' (1.5: )
 Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
 Reading a token
 Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
 Reading a token
-Next token is token ')' (1.11: )
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
 Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
+Reading a token
+Next token is token ')' (1.13: )
+Entering state 11
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 29
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -235582,6 +234284,7 @@
   }eg
 ' expout || exit 77
 stderr:
+./calc.at:1491: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -235589,70 +234292,103 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
+Next token is token ')' (1.5: )
 Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
 Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
 Reading a token
 Next token is token "number" (1.10: 1)
-Error: discarding token "number" (1.10: 1)
+Shifting token "number" (1.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
 Reading a token
-Next token is token ')' (1.11: )
+Next token is token "number" (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
 Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
+Next token is token "number" (1.12: 2)
+Error: discarding token "number" (1.12: 2)
+Reading a token
+Next token is token ')' (1.13: )
+Entering state 11
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 29
 Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 18
 Reading a token
-Next token is token "number" (1.15-18: 1111)
-Shifting token "number" (1.15-18: 1111)
+Next token is token "number" (1.17: 1)
+Shifting token "number" (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
+   $1 = token "number" (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.19-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -235665,17 +234401,10 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: cat stderr
 input:
   | error
 ./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token invalid token (1.1: )
-1.1: syntax error, unexpected invalid token
-Cleanup: discarding lookahead token invalid token (1.1: )
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -235686,7 +234415,14 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+Starting parse
+Entering state 0
+Reading a token
+Next token is token invalid token (1.1: )
+1.1: syntax error, unexpected invalid token
+Cleanup: discarding lookahead token invalid token (1.1: )
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1494: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -235694,7 +234430,9 @@
 Next token is token invalid token (1.1: )
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
-./calc.at:1494: cat stderr
+input:
+  | (* *) + (*) + (*)
+./calc.at:1494:  $PREPARSER ./calc  input
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -235705,12 +234443,8 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-input:
-./calc.at:1491: cat stderr
-  | (1 + 1) / (1 - 1)
-./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
-input:
+./calc.at:1491: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -235718,101 +234452,101 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
 Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 29
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
 Reading a token
 Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
+Entering state 11
 Next token is token ')' (1.17: )
 Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
    $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 29
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
 Next token is token '\n' (1.18-2.0: )
 Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
+   $1 = nterm exp (1.1-17: 3333)
    $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
@@ -235826,11 +234560,8 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-  | 1 = 2 = 3
-./calc.at:1491:  $PREPARSER ./calc  input
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -235838,101 +234569,101 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.2: 1)
-Shifting token "number" (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
 Reading a token
-Next token is token "number" (1.6: 1)
-Shifting token "number" (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Reading a token
-Next token is token "number" (1.12: 1)
-Shifting token "number" (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 29
 Reading a token
-Next token is token "number" (1.16: 1)
-Shifting token "number" (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token "number" (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
 Reading a token
 Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
+Entering state 11
 Next token is token ')' (1.17: )
 Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
    $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 29
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
 Next token is token '\n' (1.18-2.0: )
 Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
+   $1 = nterm exp (1.1-17: 3333)
    $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
@@ -235946,6 +234677,22 @@
 Entering state 16
 Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./torture.at:140: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+input:
+./calc.at:1494: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+  | 1 = 2 = 3
+./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1494: cat stderr
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -235975,26 +234722,80 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
-stderr:
-stdout:
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20
-./calc.at:1494: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ...
+input:
+  | 1 + 2 * 3 + !+ ++
+./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
 stderr:
 Starting parse
 Entering state 0
 Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+Starting parse
+Entering state 0
+Reading a token
 Next token is token number (1.1: 1)
 Shifting token number (1.1: 1)
 Entering state 1
@@ -236021,11 +234822,8 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
-./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./calc.at:1494: cat stderr
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -236036,18 +234834,145 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-569. calc.at:1494:  ok
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+input:
 ./calc.at:1491: cat stderr
-./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1494:  $PREPARSER ./calc  input
 input:
-./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000
   | 
   | +1
-
 ./calc.at:1491:  $PREPARSER ./calc  input
 stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 stderr:
 Starting parse
 Entering state 0
@@ -236068,11 +234993,74 @@
 2.1: syntax error, unexpected '+'
 Error: popping nterm input (1.1-2.0: )
 Cleanup: discarding lookahead token '+' (2.1: )
-memory exhausted
-memory exhausted
-./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token "number" (1.1: 1)
+Shifting token "number" (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.5: 2)
+Shifting token "number" (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token "number" (1.9: 3)
+Shifting token "number" (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 stderr:
 Starting parse
 Entering state 0
@@ -236093,9 +235081,16 @@
 2.1: syntax error, unexpected '+'
 Error: popping nterm input (1.1-2.0: )
 Cleanup: discarding lookahead token '+' (2.1: )
-memory exhausted
-memory exhausted
-./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./calc.at:1494: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -236107,8 +235102,11 @@
   }eg
 ' expout || exit 77
 ./calc.at:1491: cat stderr
+./calc.at:1494: cat stderr
 ./calc.at:1491:  $PREPARSER ./calc  /dev/null
 stderr:
+stdout:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -236116,7 +235114,9 @@
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./torture.at:548: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20
+stderr:
+./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -236124,6 +235124,110 @@
 Now at end of input.
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
+input:
+  | (#) + (#) = 2222
+./calc.at:1494:  $PREPARSER ./calc  input
+stderr:
+./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -236134,13 +235238,133 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./calc.at:1491: cat stderr
-617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ...
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.13-16: 2222)
+Shifting token "number" (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./torture.at:237: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
+./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000
 input:
+stderr:
+memory exhausted
+memory exhausted
+./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
+stderr:
 ./calc.at:1491:  $PREPARSER ./calc  input
+memory exhausted
+memory exhausted
+./calc.at:1494: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
+./calc.at:1494: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -236390,6 +235614,7 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+input:
 Starting parse
 Entering state 0
 Reading a token
@@ -236637,6 +235862,86 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+  | (1 + #) = 1111
+./calc.at:1494:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1491: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -236647,12 +235952,105 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./torture.at:510: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1491: cat stderr
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
+./calc.at:1494: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1494: cat stderr
 stderr:
+input:
+  | (# + 1) = 1111
+./calc.at:1494:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -236766,6 +236164,7 @@
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -236877,934 +236276,6 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-input:
-  | (- *) + (1 2) = 1
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 18
-Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '-' (1.2: )
-Shifting token '-' (1.2: )
-Entering state 2
-Reading a token
-Next token is token '*' (1.4: )
-1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.4: )
-Entering state 9
-Reducing stack 0 by rule 15 (line 106):
-   $1 = token '-' (1.2: )
-   $2 = token error (1.4: )
-Shifting token error (1.2-4: )
-Entering state 11
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token number (1.10: 1)
-Shifting token number (1.10: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.10: 1)
--> $$ = nterm exp (1.10: 1)
-Entering state 12
-Reading a token
-Next token is token number (1.12: 2)
-1.12: syntax error, unexpected number
-Error: popping nterm exp (1.10: 1)
-Shifting token error (1.10-12: )
-Entering state 11
-Next token is token number (1.12: 2)
-Error: discarding token number (1.12: 2)
-Reading a token
-Next token is token ')' (1.13: )
-Entering state 11
-Next token is token ')' (1.13: )
-Shifting token ')' (1.13: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10-12: )
-   $3 = token ')' (1.13: )
--> $$ = nterm exp (1.9-13: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.15: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-13: 1111)
--> $$ = nterm exp (1.1-13: 2222)
-Entering state 8
-Next token is token '=' (1.15: )
-Shifting token '=' (1.15: )
-Entering state 18
-Reading a token
-Next token is token number (1.17: 1)
-Shifting token number (1.17: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.17: 1)
--> $$ = nterm exp (1.17: 1)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-13: 2222)
-   $2 = token '=' (1.15: )
-   $3 = nterm exp (1.17: 1)
-1.1-17: error: 2222 != 1
--> $$ = nterm exp (1.1-17: 2222)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2222)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-input:
-  | (* *) + (*) + (*)
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 29
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.2: )
-1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.2: )
-Entering state 11
-Next token is token '*' (1.2: )
-Error: discarding token '*' (1.2: )
-Reading a token
-Next token is token '*' (1.4: )
-Error: discarding token '*' (1.4: )
-Reading a token
-Next token is token ')' (1.5: )
-Entering state 11
-Next token is token ')' (1.5: )
-Shifting token ')' (1.5: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-4: )
-   $3 = token ')' (1.5: )
--> $$ = nterm exp (1.1-5: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.7: )
-Shifting token '+' (1.7: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.9: )
-Shifting token '(' (1.9: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.10: )
-1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.10: )
-Entering state 11
-Next token is token '*' (1.10: )
-Error: discarding token '*' (1.10: )
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.9: )
-   $2 = token error (1.10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.9-11: 1111)
-Entering state 29
-Reading a token
-Next token is token '+' (1.13: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-5: 1111)
-   $2 = token '+' (1.7: )
-   $3 = nterm exp (1.9-11: 1111)
--> $$ = nterm exp (1.1-11: 2222)
-Entering state 8
-Next token is token '+' (1.13: )
-Shifting token '+' (1.13: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.15: )
-Shifting token '(' (1.15: )
-Entering state 4
-Reading a token
-Next token is token '*' (1.16: )
-1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
-Shifting token error (1.16: )
-Entering state 11
-Next token is token '*' (1.16: )
-Error: discarding token '*' (1.16: )
-Reading a token
-Next token is token ')' (1.17: )
-Entering state 11
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.15: )
-   $2 = token error (1.16: )
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.15-17: 1111)
-Entering state 29
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-11: 2222)
-   $2 = token '+' (1.13: )
-   $3 = nterm exp (1.15-17: 1111)
--> $$ = nterm exp (1.1-17: 3333)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 3333)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
-input:
-  | 1 + 2 * 3 + !+ ++
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '+' (1.14: )
-Shifting token '+' (1.14: )
-Entering state 14
-Reducing stack 0 by rule 17 (line 108):
-   $1 = token '!' (1.13: )
-   $2 = token '+' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-input:
-  | 1 + 2 * 3 + !- ++
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token number (1.1: 1)
-Shifting token number (1.1: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.1: 1)
--> $$ = nterm exp (1.1: 1)
-Entering state 8
-Reading a token
-Next token is token '+' (1.3: )
-Shifting token '+' (1.3: )
-Entering state 20
-Reading a token
-Next token is token number (1.5: 2)
-Shifting token number (1.5: 2)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.5: 2)
--> $$ = nterm exp (1.5: 2)
-Entering state 29
-Reading a token
-Next token is token '*' (1.7: )
-Shifting token '*' (1.7: )
-Entering state 21
-Reading a token
-Next token is token number (1.9: 3)
-Shifting token number (1.9: 3)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.9: 3)
--> $$ = nterm exp (1.9: 3)
-Entering state 30
-Reading a token
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 9 (line 92):
-   $1 = nterm exp (1.5: 2)
-   $2 = token '*' (1.7: )
-   $3 = nterm exp (1.9: 3)
--> $$ = nterm exp (1.5-9: 6)
-Entering state 29
-Next token is token '+' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1: 1)
-   $2 = token '+' (1.3: )
-   $3 = nterm exp (1.5-9: 6)
--> $$ = nterm exp (1.1-9: 7)
-Entering state 8
-Next token is token '+' (1.11: )
-Shifting token '+' (1.11: )
-Entering state 20
-Reading a token
-Next token is token '!' (1.13: )
-Shifting token '!' (1.13: )
-Entering state 5
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 13
-Reducing stack 0 by rule 18 (line 109):
-   $1 = token '!' (1.13: )
-   $2 = token '-' (1.14: )
-Cleanup: popping token '+' (1.11: )
-Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
-input.y:66.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:170.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:175.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:180.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:188.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:202.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:207.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:221.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:300.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:323.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence]
-input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence]
-input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence]
-input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence]
-input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence]
-input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence]
-input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence]
-input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence]
-input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence]
-input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence]
-input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence]
-input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence]
-input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence]
-input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
-input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence]
-input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence]
-input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
-input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence]
-input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./existing.at:74: sed 's,.*/$,,' stderr 1>&2
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-input:
-  | (#) + (#) = 2222
-./calc.at:1491:  $PREPARSER ./calc  input
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -237819,131 +236290,11 @@
 Next token is token error (1.2: )
 Error: discarding token error (1.2: )
 Reading a token
-Next token is token ')' (1.3: )
-Entering state 11
-Next token is token ')' (1.3: )
-Shifting token ')' (1.3: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2: )
-   $3 = token ')' (1.3: )
--> $$ = nterm exp (1.1-3: 1111)
-Entering state 8
-Reading a token
-Next token is token '+' (1.5: )
-Shifting token '+' (1.5: )
-Entering state 20
-Reading a token
-Next token is token '(' (1.7: )
-Shifting token '(' (1.7: )
-Entering state 4
-Reading a token
-1.8: syntax error: invalid character: '#'
-Next token is token error (1.8: )
-Shifting token error (1.8: )
-Entering state 11
-Next token is token error (1.8: )
-Error: discarding token error (1.8: )
-Reading a token
-Next token is token ')' (1.9: )
-Entering state 11
-Next token is token ')' (1.9: )
-Shifting token ')' (1.9: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.7: )
-   $2 = token error (1.8: )
-   $3 = token ')' (1.9: )
--> $$ = nterm exp (1.7-9: 1111)
-Entering state 29
-Reading a token
-Next token is token '=' (1.11: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.1-3: 1111)
-   $2 = token '+' (1.5: )
-   $3 = nterm exp (1.7-9: 1111)
--> $$ = nterm exp (1.1-9: 2222)
-Entering state 8
-Next token is token '=' (1.11: )
-Shifting token '=' (1.11: )
-Entering state 18
-Reading a token
-Next token is token number (1.13-16: 2222)
-Shifting token number (1.13-16: 2222)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.13-16: 2222)
--> $$ = nterm exp (1.13-16: 2222)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.17-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-9: 2222)
-   $2 = token '=' (1.11: )
-   $3 = nterm exp (1.13-16: 2222)
--> $$ = nterm exp (1.1-16: 2222)
-Entering state 8
-Next token is token '\n' (1.17-2.0: )
-Shifting token '\n' (1.17-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-16: 2222)
-   $2 = token '\n' (1.17-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1491: cat stderr
-input:
-  | (1 + #) = 1111
-./calc.at:1491:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
 Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Error: discarding token '+' (1.4: )
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
 Reading a token
 Next token is token ')' (1.7: )
 Entering state 11
@@ -237960,12 +236311,12 @@
 Next token is token '=' (1.9: )
 Shifting token '=' (1.9: )
 Entering state 18
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Reading a token
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
+   $1 = token "number" (1.11-14: 1111)
 -> $$ = nterm exp (1.11-14: 1111)
 Entering state 27
 Reading a token
@@ -237990,12 +236341,22 @@
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input (2.1: )
+Shifting token "end of input" (2.1: )
 Entering state 16
-Cleanup: popping token end of input (2.1: )
+Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -238003,26 +236364,18 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
 Reading a token
 Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
+Error: discarding token '+' (1.4: )
 Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
+Next token is token "number" (1.6: 1)
+Error: discarding token "number" (1.6: 1)
 Reading a token
 Next token is token ')' (1.7: )
 Entering state 11
@@ -238040,11 +236393,11 @@
 Shifting token '=' (1.9: )
 Entering state 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token "number" (1.11-14: 1111)
+Shifting token "number" (1.11-14: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
+   $1 = token "number" (1.11-14: 1111)
 -> $$ = nterm exp (1.11-14: 1111)
 Entering state 27
 Reading a token
@@ -238069,11 +236422,12 @@
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input (2.1: )
+Shifting token "end of input" (2.1: )
 Entering state 16
-Cleanup: popping token end of input (2.1: )
+Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
+./calc.at:1491: cat stderr
+./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -238083,10 +236437,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1491: cat stderr
 input:
-  | (# + 1) = 1111
+  | (- *) + (1 2) = 1
 ./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1494: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -238095,56 +236449,103 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
 Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.5: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
+Entering state 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Reading a token
+Next token is token ')' (1.13: )
+Entering state 11
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -238158,6 +236559,10 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1494:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -238166,56 +236571,103 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
+Next token is token '-' (1.2: )
+Shifting token '-' (1.2: )
+Entering state 2
 Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
+Next token is token '*' (1.4: )
+1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.4: )
+Entering state 9
+Reducing stack 0 by rule 15 (line 106):
+   $1 = token '-' (1.2: )
+   $2 = token error (1.4: )
+Shifting token error (1.2-4: )
+Entering state 11
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token ')' (1.5: )
 Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
 Entering state 25
 Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token number (1.10: 1)
+Shifting token number (1.10: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.10: 1)
+-> $$ = nterm exp (1.10: 1)
+Entering state 12
+Reading a token
+Next token is token number (1.12: 2)
+1.12: syntax error, unexpected number
+Error: popping nterm exp (1.10: 1)
+Shifting token error (1.10-12: )
+Entering state 11
+Next token is token number (1.12: 2)
+Error: discarding token number (1.12: 2)
+Reading a token
+Next token is token ')' (1.13: )
+Entering state 11
+Next token is token ')' (1.13: )
+Shifting token ')' (1.13: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10-12: )
+   $3 = token ')' (1.13: )
+-> $$ = nterm exp (1.9-13: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.15: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-13: 1111)
+-> $$ = nterm exp (1.1-13: 2222)
+Entering state 8
+Next token is token '=' (1.15: )
+Shifting token '=' (1.15: )
 Entering state 18
 Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
+Next token is token number (1.17: 1)
+Shifting token number (1.17: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
+   $1 = token number (1.17: 1)
+-> $$ = nterm exp (1.17: 1)
 Entering state 27
 Reading a token
-Next token is token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
 Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
+   $1 = nterm exp (1.1-13: 2222)
+   $2 = token '=' (1.15: )
+   $3 = nterm exp (1.17: 1)
+1.1-17: error: 2222 != 1
+-> $$ = nterm exp (1.1-17: 2222)
 Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
+   $1 = nterm exp (1.1-17: 2222)
+   $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
 Reducing stack 0 by rule 1 (line 69):
@@ -238228,27 +236680,6 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-stderr:
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-stdout:
-./torture.at:141:  $PREPARSER ./input
-./calc.at:1491: cat stderr
-stderr:
-./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1491:  $PREPARSER ./calc  input
-604. torture.at:132:  ok
-stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -238256,11 +236687,11 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
+   $1 = token "number" (1.2: 1)
 -> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
@@ -238280,8 +236711,8 @@
 Next token is token '+' (1.8: )
 Error: discarding token '+' (1.8: )
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
 Reading a token
 Next token is token ')' (1.11: )
 Entering state 11
@@ -238299,11 +236730,11 @@
 Shifting token '=' (1.13: )
 Entering state 18
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
+   $1 = token "number" (1.15-18: 1111)
 -> $$ = nterm exp (1.15-18: 1111)
 Entering state 27
 Reading a token
@@ -238328,12 +236759,23 @@
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input (2.1: )
+Shifting token "end of input" (2.1: )
 Entering state 16
-Cleanup: popping token end of input (2.1: )
+Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 stderr:
+./calc.at:1491: cat stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -238341,11 +236783,11 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
+   $1 = token "number" (1.2: 1)
 -> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
@@ -238365,8 +236807,8 @@
 Next token is token '+' (1.8: )
 Error: discarding token '+' (1.8: )
 Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
+Next token is token "number" (1.10: 1)
+Error: discarding token "number" (1.10: 1)
 Reading a token
 Next token is token ')' (1.11: )
 Entering state 11
@@ -238384,11 +236826,11 @@
 Shifting token '=' (1.13: )
 Entering state 18
 Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
+Next token is token "number" (1.15-18: 1111)
+Shifting token "number" (1.15-18: 1111)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
+   $1 = token "number" (1.15-18: 1111)
 -> $$ = nterm exp (1.15-18: 1111)
 Entering state 27
 Reading a token
@@ -238413,44 +236855,13 @@
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input (2.1: )
+Shifting token "end of input" (2.1: )
 Entering state 16
-Cleanup: popping token end of input (2.1: )
+Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 stderr:
-
-input.y:66.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:170.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:175.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:180.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:188.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:202.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:207.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:221.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:300.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:323.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence]
-input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence]
-input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence]
-input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence]
-input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence]
-input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence]
-input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence]
-input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence]
-input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence]
-input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence]
-input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence]
-input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence]
-input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence]
-input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
-input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence]
-input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence]
-input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
-input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence]
-input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./calc.at:1491: "$PERL" -pi -e 'use strict;
+input:
+./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
     my $unexp = $1;
@@ -238460,12 +236871,21 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1491: cat stderr
-./existing.at:74: sed 's,.*/$,,' stderr 1>&2
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
-input:
-  | (1 + 1) / (1 - 1)
+stdout:
+  | (* *) + (*) + (*)
 ./calc.at:1491:  $PREPARSER ./calc  input
+./calc.at:1492: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
+./calc.at:1494: cat stderr
 stderr:
 Starting parse
 Entering state 0
@@ -238474,101 +236894,236 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
 Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
 Entering state 20
 Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
 Entering state 29
 Reading a token
-Next token is token ')' (1.7: )
+Next token is token '+' (1.13: )
 Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
+Reading a token
+Next token is token ')' (1.17: )
+Entering state 11
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 29
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 3333)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+input:
+./calc.at:1492:  $PREPARSER ./calc  input
+stderr:
+  | (1 + 1) / (1 - 1)
+./calc.at:1494:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.2: )
+1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.2: )
+Entering state 11
+Next token is token '*' (1.2: )
+Error: discarding token '*' (1.2: )
+Reading a token
+Next token is token '*' (1.4: )
+Error: discarding token '*' (1.4: )
+Reading a token
+Next token is token ')' (1.5: )
+Entering state 11
+Next token is token ')' (1.5: )
+Shifting token ')' (1.5: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
    $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
+   $2 = token error (1.2-4: )
+   $3 = token ')' (1.5: )
+-> $$ = nterm exp (1.1-5: 1111)
 Entering state 8
 Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
+Next token is token '+' (1.7: )
+Shifting token '+' (1.7: )
+Entering state 20
 Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
+Next token is token '(' (1.9: )
+Shifting token '(' (1.9: )
 Entering state 4
 Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
+Next token is token '*' (1.10: )
+1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.10: )
+Entering state 11
+Next token is token '*' (1.10: )
+Error: discarding token '*' (1.10: )
 Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.9: )
+   $2 = token error (1.10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.9-11: 1111)
+Entering state 29
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
+Next token is token '+' (1.13: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-5: 1111)
+   $2 = token '+' (1.7: )
+   $3 = nterm exp (1.9-11: 1111)
+-> $$ = nterm exp (1.1-11: 2222)
+Entering state 8
+Next token is token '+' (1.13: )
+Shifting token '+' (1.13: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.15: )
+Shifting token '(' (1.15: )
+Entering state 4
+Reading a token
+Next token is token '*' (1.16: )
+1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!'
+Shifting token error (1.16: )
+Entering state 11
+Next token is token '*' (1.16: )
+Error: discarding token '*' (1.16: )
 Reading a token
 Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
+Entering state 11
 Next token is token ')' (1.17: )
 Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.15: )
+   $2 = token error (1.16: )
    $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
+-> $$ = nterm exp (1.15-17: 1111)
+Entering state 29
 Reading a token
 Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-11: 2222)
+   $2 = token '+' (1.13: )
+   $3 = nterm exp (1.15-17: 1111)
+-> $$ = nterm exp (1.1-17: 3333)
 Entering state 8
 Next token is token '\n' (1.18-2.0: )
 Shifting token '\n' (1.18-2.0: )
 Entering state 24
 Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
+   $1 = nterm exp (1.1-17: 3333)
    $2 = token '\n' (1.18-2.0: )
 -> $$ = nterm line (1.1-2.0: )
 Entering state 7
@@ -238582,7 +237137,6 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -238591,11 +237145,11 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
+   $1 = token "number" (1.2: 1)
 -> $$ = nterm exp (1.2: 1)
 Entering state 12
 Reading a token
@@ -238603,11 +237157,11 @@
 Shifting token '+' (1.4: )
 Entering state 20
 Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.6: 1)
+   $1 = token "number" (1.6: 1)
 -> $$ = nterm exp (1.6: 1)
 Entering state 29
 Reading a token
@@ -238636,11 +237190,11 @@
 Shifting token '(' (1.11: )
 Entering state 4
 Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.12: 1)
+   $1 = token "number" (1.12: 1)
 -> $$ = nterm exp (1.12: 1)
 Entering state 12
 Reading a token
@@ -238648,11 +237202,11 @@
 Shifting token '-' (1.14: )
 Entering state 19
 Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
 Entering state 1
 Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.16: 1)
+   $1 = token "number" (1.16: 1)
 -> $$ = nterm exp (1.16: 1)
 Entering state 28
 Reading a token
@@ -238695,295 +237249,10 @@
 Entering state 6
 Reading a token
 Now at end of input.
-Shifting token end of input (2.1: )
+Shifting token "end of input" (2.1: )
 Entering state 16
-Cleanup: popping token end of input (2.1: )
+Cleanup: popping token "end of input" (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1491: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ...
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./calc.at:1491: cat stderr
-566. calc.at:1491:  ok
-
-stderr:
-stdout:
-./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20
-stderr:
-./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900
-stderr:
-./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000
-stderr:
-memory exhausted
-memory exhausted
-./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-memory exhausted
-memory exhausted
-615. torture.at:531:  ok
-619. existing.at:808: testing GNU Cim Grammar: LALR(1) ...
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
-620. existing.at:808: testing GNU Cim Grammar: IELR(1) ...
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
-./existing.at:74: sed -n 's/^State //p' input.output | tail -1
-./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
-./existing.at:74: sed -n 's/^State //p' input.output | tail -1
-./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
-stderr:
-input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr]
-input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-stdout:
-./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
-./existing.at:74: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr]
-input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-stdout:
-./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
-./existing.at:74: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-input.y:128.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:137.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:142.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:161.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:179.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:205.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:213.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:225.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:292.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:294.20: error: empty rule without %empty [-Werror=empty-rule]
-input.y:367.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:373.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:387.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:401.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:413.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:443.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:471.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:474.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:489.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:506.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:587.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:591.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr]
-input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence]
-input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence]
-input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence]
-input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence]
-input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence]
-input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence]
-input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./existing.at:808: sed 's,.*/$,,' stderr 1>&2
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
-stderr:
-input.y:128.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:137.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:142.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:161.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:179.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:205.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:213.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:225.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:292.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:294.20: error: empty rule without %empty [-Werror=empty-rule]
-input.y:367.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:373.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:387.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:401.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:413.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:443.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:471.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:474.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:489.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:506.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:587.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:591.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr]
-input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence]
-input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence]
-input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence]
-input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence]
-input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence]
-input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence]
-input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./existing.at:808: sed 's,.*/$,,' stderr 1>&2
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
-stderr:
-stdout:
-./existing.at:74:  $PREPARSER ./input
-stderr:
-syntax error, unexpected '*', expecting NEWLINE or '{' or ';'
-./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-616. existing.at:74:  ok
-
-621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ...
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-stderr:
-stdout:
-./existing.at:74:  $PREPARSER ./input
-stderr:
-./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-617. existing.at:74:  ok
-
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ...
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
-./torture.at:394: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
-./existing.at:808: sed -n 's/^State //p' input.output | tail -1
-./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
-./existing.at:808: sed -n 's/^State //p' input.output | tail -1
-./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
-stderr:
-input.y:202.20: error: empty rule without %empty [-Werror=empty-rule]
-input.y:270.7: error: empty rule without %empty [-Werror=empty-rule]
-input.y:292.13: error: empty rule without %empty [-Werror=empty-rule]
-input.y:309.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:382.14: error: empty rule without %empty [-Werror=empty-rule]
-input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other]
-input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence]
-input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence]
-input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence]
-input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence]
-input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence]
-input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence]
-input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence]
-input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence]
-input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence]
-input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence]
-input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence]
-input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence]
-input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence]
-input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence]
-input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
-input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence]
-input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence]
-input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
-input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./existing.at:1460: sed 's,.*/$,,' stderr 1>&2
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
-stderr:
-stdout:
-./calc.at:1492: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1492:  $PREPARSER ./calc  input
-stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -239821,42 +238090,139 @@
 Entering state 16
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
-input.y:66.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:170.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:175.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:180.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:188.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:202.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:207.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:221.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:300.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y:323.10: error: empty rule without %empty [-Werror=empty-rule]
-input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr]
-input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence]
-input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence]
-input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence]
-input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence]
-input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence]
-input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence]
-input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence]
-input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence]
-input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence]
-input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence]
-input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence]
-input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence]
-input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence]
-input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
-input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence]
-input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence]
-input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
-input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence]
-input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
+./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.2: 1)
+Shifting token "number" (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token "number" (1.6: 1)
+Shifting token "number" (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token "number" (1.12: 1)
+Shifting token "number" (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Reading a token
+Next token is token "number" (1.16: 1)
+Shifting token "number" (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token "number" (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token "end of input" (2.1: )
+Entering state 16
+Cleanup: popping token "end of input" (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1491: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
 Next token is token number (1.1: 1)
 Shifting token number (1.1: 1)
 Entering state 1
@@ -240691,10 +239057,22 @@
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
 input:
-./existing.at:74: sed 's,.*/$,,' stderr 1>&2
+input:
+./calc.at:1494: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
   | 1 2
+  | 1 + 2 * 3 + !+ ++
 ./calc.at:1492:  $PREPARSER ./calc  input
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -240711,8 +239089,76 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token number (1.3: 2)
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1494: cat stderr
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+569. calc.at:1494:  ok
 Starting parse
 Entering state 0
 Reading a token
@@ -240728,6 +239174,72 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token number (1.3: 2)
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '+' (1.14: )
+Shifting token '+' (1.14: )
+Entering state 14
+Reducing stack 0 by rule 17 (line 108):
+   $1 = token '!' (1.13: )
+   $2 = token '+' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -240738,11 +239250,151 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+
+input:
+  | 1 + 2 * 3 + !- ++
+./calc.at:1491:  $PREPARSER ./calc  input
 ./calc.at:1492: cat stderr
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | 1//2
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
+stderr:
+stdout:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token number (1.1: 1)
+Shifting token number (1.1: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.1: 1)
+-> $$ = nterm exp (1.1: 1)
+Entering state 8
+Reading a token
+Next token is token '+' (1.3: )
+Shifting token '+' (1.3: )
+Entering state 20
+Reading a token
+Next token is token number (1.5: 2)
+Shifting token number (1.5: 2)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.5: 2)
+-> $$ = nterm exp (1.5: 2)
+Entering state 29
+Reading a token
+Next token is token '*' (1.7: )
+Shifting token '*' (1.7: )
+Entering state 21
+Reading a token
+Next token is token number (1.9: 3)
+Shifting token number (1.9: 3)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.9: 3)
+-> $$ = nterm exp (1.9: 3)
+Entering state 30
+Reading a token
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 9 (line 92):
+   $1 = nterm exp (1.5: 2)
+   $2 = token '*' (1.7: )
+   $3 = nterm exp (1.9: 3)
+-> $$ = nterm exp (1.5-9: 6)
+Entering state 29
+Next token is token '+' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1: 1)
+   $2 = token '+' (1.3: )
+   $3 = nterm exp (1.5-9: 6)
+-> $$ = nterm exp (1.1-9: 7)
+Entering state 8
+Next token is token '+' (1.11: )
+Shifting token '+' (1.11: )
+Entering state 20
+Reading a token
+Next token is token '!' (1.13: )
+Shifting token '!' (1.13: )
+Entering state 5
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 13
+Reducing stack 0 by rule 18 (line 109):
+   $1 = token '!' (1.13: )
+   $2 = token '-' (1.14: )
+Cleanup: popping token '+' (1.11: )
+Cleanup: popping nterm exp (1.1-9: 7)
+stderr:
+./torture.at:238:  $PREPARSER ./input
 Starting parse
 Entering state 0
 Reading a token
@@ -240763,8 +239415,21 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
+stderr:
+stderr:
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stdout:
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -240785,6 +239450,13 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
+./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20
+stderr:
+./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+605. torture.at:216:  ok
+615. torture.at:531: testing Exploding the Stack Size with Malloc ...
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -240795,32 +239467,251 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./calc.at:1491: cat stderr
+./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900
+stderr:
+input:
+./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+  | (#) + (#) = 2222
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
 ./calc.at:1492: cat stderr
+
+./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000
+stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+memory exhausted
+memory exhausted
+./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
   | error
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
+stderr:
+memory exhausted
+memory exhausted
 Starting parse
 Entering state 0
 Reading a token
 Next token is token invalid token (1.1: )
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
-stderr:
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr]
-input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr]
-input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-stdout:
 stderr:
-./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
+614. torture.at:485:  ok
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token ')' (1.3: )
+Entering state 11
+Next token is token ')' (1.3: )
+Shifting token ')' (1.3: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2: )
+   $3 = token ')' (1.3: )
+-> $$ = nterm exp (1.1-3: 1111)
+Entering state 8
+Reading a token
+Next token is token '+' (1.5: )
+Shifting token '+' (1.5: )
+Entering state 20
+Reading a token
+Next token is token '(' (1.7: )
+Shifting token '(' (1.7: )
+Entering state 4
+Reading a token
+1.8: syntax error: invalid character: '#'
+Next token is token error (1.8: )
+Shifting token error (1.8: )
+Entering state 11
+Next token is token error (1.8: )
+Error: discarding token error (1.8: )
+Reading a token
+Next token is token ')' (1.9: )
+Entering state 11
+Next token is token ')' (1.9: )
+Shifting token ')' (1.9: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.7: )
+   $2 = token error (1.8: )
+   $3 = token ')' (1.9: )
+-> $$ = nterm exp (1.7-9: 1111)
+Entering state 29
+Reading a token
+Next token is token '=' (1.11: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.1-3: 1111)
+   $2 = token '+' (1.5: )
+   $3 = nterm exp (1.7-9: 1111)
+-> $$ = nterm exp (1.1-9: 2222)
+Entering state 8
+Next token is token '=' (1.11: )
+Shifting token '=' (1.11: )
+Entering state 18
+Reading a token
+Next token is token number (1.13-16: 2222)
+Shifting token number (1.13-16: 2222)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.13-16: 2222)
+-> $$ = nterm exp (1.13-16: 2222)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.17-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-9: 2222)
+   $2 = token '=' (1.11: )
+   $3 = nterm exp (1.13-16: 2222)
+-> $$ = nterm exp (1.1-16: 2222)
+Entering state 8
+Next token is token '\n' (1.17-2.0: )
+Shifting token '\n' (1.17-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-16: 2222)
+   $2 = token '\n' (1.17-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 Starting parse
 Entering state 0
 Reading a token
 Next token is token invalid token (1.1: )
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
-./existing.at:808: grep '^State.*conflicts:' input.output
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -240831,12 +239722,96 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./existing.at:808: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./calc.at:1492: cat stderr
+./calc.at:1491: cat stderr
+input:
+  | (1 + #) = 1111
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
 input:
+./torture.at:535: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
   | 1 = 2 = 3
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -240866,8 +239841,85 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 Starting parse
 Entering state 0
@@ -240898,12 +239950,19 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
-input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr]
-input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr]
-input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-stdout:
-./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
-./existing.at:808: grep '^State.*conflicts:' input.output
+616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ...
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+./calc.at:1491: cat stderr
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -240914,13 +239973,158 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./existing.at:808: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./calc.at:1492: cat stderr
 input:
+  | (# + 1) = 1111
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+input:
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ...
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
   | 
   | +1
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 Starting parse
 Entering state 0
 Reading a token
@@ -240942,6 +240146,16 @@
 Cleanup: discarding lookahead token '+' (2.1: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 Starting parse
 Entering state 0
 Reading a token
@@ -240971,7 +240185,95 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1491: cat stderr
 ./calc.at:1492: cat stderr
+input:
+  | (1 + # + 1) = 1111
+./calc.at:1491:  $PREPARSER ./calc  input
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492:  $PREPARSER ./calc  /dev/null
 stderr:
 Starting parse
@@ -240980,6 +240282,7 @@
 Now at end of input.
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -240988,6 +240291,90 @@
 Now at end of input.
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -240998,11 +240385,30 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
 ./calc.at:1492: cat stderr
+stdout:
+./torture.at:141:  $PREPARSER ./input
+stderr:
+./calc.at:1491: cat stderr
+./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+604. torture.at:132:  ok
 input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1492:  $PREPARSER ./calc  input
+input:
 stderr:
+  | (1 + 1) / (1 - 1)
 Starting parse
 Entering state 0
 Reading a token
@@ -241250,6 +240656,7 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./calc.at:1491:  $PREPARSER ./calc  input
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -241259,6 +240666,122 @@
 Shifting token '(' (1.1: )
 Entering state 4
 Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
 Next token is token ')' (1.2: )
 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!'
 Shifting token error (1.2: )
@@ -241499,7 +241022,125 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -241510,11 +241151,25 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1491: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1492: cat stderr
+./calc.at:1491: cat stderr
+566. calc.at:1491:  ok
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
+stdout:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -241626,8 +241281,13 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
+./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20
+
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -241739,6 +241399,7 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -241750,10 +241411,21 @@
   }eg
 ' expout || exit 77
 ./calc.at:1492: cat stderr
+618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ...
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900
+stderr:
+./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
   | (- *) + (1 2) = 1
 ./calc.at:1492:  $PREPARSER ./calc  input
+./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000
 stderr:
+memory exhausted
+memory exhausted
+stderr:
+./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reading a token
@@ -241870,7 +241542,13 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+619. existing.at:808: testing GNU Cim Grammar: LALR(1) ...
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+memory exhausted
+memory exhausted
+./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
 Starting parse
 Entering state 0
@@ -241998,7 +241676,40 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+input.y:66.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:170.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:175.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:180.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:188.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:202.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:207.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:221.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:300.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:323.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence]
+input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence]
+input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence]
+input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence]
+input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence]
+input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence]
+input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence]
+input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence]
+input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence]
+input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence]
+input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence]
+input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence]
+input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence]
+input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
+input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence]
+input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence]
+input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
+input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence]
+input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
 ./calc.at:1492: cat stderr
+./existing.at:74: sed 's,.*/$,,' stderr 1>&2
 input:
   | (* *) + (*) + (*)
 ./calc.at:1492:  $PREPARSER ./calc  input
@@ -242119,7 +241830,41 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./torture.at:548: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
+stderr:
+input.y:66.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:170.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:175.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:180.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:188.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:202.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:207.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:221.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:300.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:323.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence]
+input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence]
+input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence]
+input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence]
+input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence]
+input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence]
+input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence]
+input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence]
+input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence]
+input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence]
+input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence]
+input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence]
+input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence]
+input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
+input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence]
+input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence]
+input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
+input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence]
+input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
 Starting parse
 Entering state 0
 Reading a token
@@ -242235,6 +241980,8 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./existing.at:74: sed 's,.*/$,,' stderr 1>&2
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -242384,11 +242131,8 @@
 Cleanup: popping nterm exp (1.1-9: 7)
 input:
   | 1 + 2 * 3 + !- ++
-stderr:
 ./calc.at:1492:  $PREPARSER ./calc  input
-stdout:
 stderr:
-./existing.at:808:  $PREPARSER ./input
 Starting parse
 Entering state 0
 Reading a token
@@ -242454,11 +242198,8 @@
    $2 = token '-' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-619. existing.at:808:  ok
 Starting parse
 Entering state 0
 Reading a token
@@ -242524,9 +242265,6 @@
    $2 = token '-' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-stderr:
-stdout:
-./existing.at:808:  $PREPARSER ./input
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -242537,15 +242275,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
-./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-620. existing.at:808:  ok
 ./calc.at:1492: cat stderr
 input:
   | (#) + (#) = 2222
 ./calc.at:1492:  $PREPARSER ./calc  input
-
 stderr:
 Starting parse
 Entering state 0
@@ -242645,6 +242378,8 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
 stderr:
 Starting parse
 Entering state 0
@@ -242754,10 +242489,23 @@
   }eg
 ' expout || exit 77
 ./calc.at:1492: cat stderr
+stderr:
 input:
   | (1 + #) = 1111
 ./calc.at:1492:  $PREPARSER ./calc  input
 stderr:
+stdout:
+./calc.at:1494: "$PERL" -ne '
+  chomp;
+  print "$ARGV:$.: {$_}\n"
+    if (# No starting/ending empty lines.
+        (eof || $. == 1) && /^\s*$/
+        # No trailing space.
+        || /\s$/
+        # No tabs.
+        || /\t/
+        )' calc.cc calc.hh
+
 Starting parse
 Entering state 0
 Reading a token
@@ -242836,7 +242584,22 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+input:
 stderr:
+  | 1 + 2 * 3 = 7
+  | 1 + 2 * -3 = -5
+  | 
+  | -1^2 = -1
+  | (-1)^2 = 1
+  | 
+  | ---1 = -1
+  | 
+  | 1 - 2 - 3 = -4
+  | 1 - (2 - 3) = 2
+  | 
+  | 2^2^3 = 256
+  | (2^2)^3 = 64
+./calc.at:1494:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -242914,349 +242677,6 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ...
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ...
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
-./calc.at:1492: cat stderr
-input:
-  | (# + 1) = 1111
-./calc.at:1492:  $PREPARSER ./calc  input
-stderr:
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-1.2: syntax error: invalid character: '#'
-Next token is token error (1.2: )
-Shifting token error (1.2: )
-Entering state 11
-Next token is token error (1.2: )
-Error: discarding token error (1.2: )
-Reading a token
-Next token is token '+' (1.4: )
-Error: discarding token '+' (1.4: )
-Reading a token
-Next token is token number (1.6: 1)
-Error: discarding token number (1.6: 1)
-Reading a token
-Next token is token ')' (1.7: )
-Entering state 11
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-6: )
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.9: )
-Shifting token '=' (1.9: )
-Entering state 18
-Reading a token
-Next token is token number (1.11-14: 1111)
-Shifting token number (1.11-14: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.11-14: 1111)
--> $$ = nterm exp (1.11-14: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.15-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-7: 1111)
-   $2 = token '=' (1.9: )
-   $3 = nterm exp (1.11-14: 1111)
--> $$ = nterm exp (1.1-14: 1111)
-Entering state 8
-Next token is token '\n' (1.15-2.0: )
-Shifting token '\n' (1.15-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-14: 1111)
-   $2 = token '\n' (1.15-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
-./calc.at:1492: cat stderr
-input:
-  | (1 + # + 1) = 1111
-./calc.at:1492:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-1.6: syntax error: invalid character: '#'
-Next token is token error (1.6: )
-Error: popping token '+' (1.4: )
-Error: popping nterm exp (1.2: 1)
-Shifting token error (1.2-6: )
-Entering state 11
-Next token is token error (1.6: )
-Error: discarding token error (1.6: )
-Reading a token
-Next token is token '+' (1.8: )
-Error: discarding token '+' (1.8: )
-Reading a token
-Next token is token number (1.10: 1)
-Error: discarding token number (1.10: 1)
-Reading a token
-Next token is token ')' (1.11: )
-Entering state 11
-Next token is token ')' (1.11: )
-Shifting token ')' (1.11: )
-Entering state 25
-Reducing stack 0 by rule 14 (line 105):
-   $1 = token '(' (1.1: )
-   $2 = token error (1.2-10: )
-   $3 = token ')' (1.11: )
--> $$ = nterm exp (1.1-11: 1111)
-Entering state 8
-Reading a token
-Next token is token '=' (1.13: )
-Shifting token '=' (1.13: )
-Entering state 18
-Reading a token
-Next token is token number (1.15-18: 1111)
-Shifting token number (1.15-18: 1111)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.15-18: 1111)
--> $$ = nterm exp (1.15-18: 1111)
-Entering state 27
-Reading a token
-Next token is token '\n' (1.19-2.0: )
-Reducing stack 0 by rule 6 (line 80):
-   $1 = nterm exp (1.1-11: 1111)
-   $2 = token '=' (1.13: )
-   $3 = nterm exp (1.15-18: 1111)
--> $$ = nterm exp (1.1-18: 1111)
-Entering state 8
-Next token is token '\n' (1.19-2.0: )
-Shifting token '\n' (1.19-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-18: 1111)
-   $2 = token '\n' (1.19-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1492: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -243267,410 +242687,9 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./calc.at:1492: cat stderr
-input:
-  | (1 + 1) / (1 - 1)
-./calc.at:1492:  $PREPARSER ./calc  input
-stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-Starting parse
-Entering state 0
-Reading a token
-Next token is token '(' (1.1: )
-Shifting token '(' (1.1: )
-Entering state 4
-Reading a token
-Next token is token number (1.2: 1)
-Shifting token number (1.2: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.2: 1)
--> $$ = nterm exp (1.2: 1)
-Entering state 12
-Reading a token
-Next token is token '+' (1.4: )
-Shifting token '+' (1.4: )
-Entering state 20
-Reading a token
-Next token is token number (1.6: 1)
-Shifting token number (1.6: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.6: 1)
--> $$ = nterm exp (1.6: 1)
-Entering state 29
-Reading a token
-Next token is token ')' (1.7: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = nterm exp (1.2: 1)
-   $2 = token '+' (1.4: )
-   $3 = nterm exp (1.6: 1)
--> $$ = nterm exp (1.2-6: 2)
-Entering state 12
-Next token is token ')' (1.7: )
-Shifting token ')' (1.7: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.1: )
-   $2 = nterm exp (1.2-6: 2)
-   $3 = token ')' (1.7: )
--> $$ = nterm exp (1.1-7: 2)
-Entering state 8
-Reading a token
-Next token is token '/' (1.9: )
-Shifting token '/' (1.9: )
-Entering state 22
-Reading a token
-Next token is token '(' (1.11: )
-Shifting token '(' (1.11: )
-Entering state 4
-Reading a token
-Next token is token number (1.12: 1)
-Shifting token number (1.12: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.12: 1)
--> $$ = nterm exp (1.12: 1)
-Entering state 12
-Reading a token
-Next token is token '-' (1.14: )
-Shifting token '-' (1.14: )
-Entering state 19
-Reading a token
-Next token is token number (1.16: 1)
-Shifting token number (1.16: 1)
-Entering state 1
-Reducing stack 0 by rule 5 (line 79):
-   $1 = token number (1.16: 1)
--> $$ = nterm exp (1.16: 1)
-Entering state 28
-Reading a token
-Next token is token ')' (1.17: )
-Reducing stack 0 by rule 8 (line 91):
-   $1 = nterm exp (1.12: 1)
-   $2 = token '-' (1.14: )
-   $3 = nterm exp (1.16: 1)
--> $$ = nterm exp (1.12-16: 0)
-Entering state 12
-Next token is token ')' (1.17: )
-Shifting token ')' (1.17: )
-Entering state 26
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token '(' (1.11: )
-   $2 = nterm exp (1.12-16: 0)
-   $3 = token ')' (1.17: )
--> $$ = nterm exp (1.11-17: 0)
-Entering state 31
-Reading a token
-Next token is token '\n' (1.18-2.0: )
-Reducing stack 0 by rule 10 (line 93):
-   $1 = nterm exp (1.1-7: 2)
-   $2 = token '/' (1.9: )
-   $3 = nterm exp (1.11-17: 0)
-1.11-17: error: null divisor
--> $$ = nterm exp (1.1-17: 2)
-Entering state 8
-Next token is token '\n' (1.18-2.0: )
-Shifting token '\n' (1.18-2.0: )
-Entering state 24
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm exp (1.1-17: 2)
-   $2 = token '\n' (1.18-2.0: )
--> $$ = nterm line (1.1-2.0: )
-Entering state 7
-Reducing stack 0 by rule 1 (line 69):
-   $1 = nterm line (1.1-2.0: )
--> $$ = nterm input (1.1-2.0: )
-Entering state 6
-Reading a token
-Now at end of input.
-Shifting token end of input (2.1: )
-Entering state 16
-Cleanup: popping token end of input (2.1: )
-Cleanup: popping nterm input (1.1-2.0: )
-./calc.at:1492: "$PERL" -pi -e 'use strict;
-  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
-  {
-    my $unexp = $1;
-    my @exps = $2 =~ /\[(.*?)\]/g;
-    ($#exps && $#exps < 4)
-    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
-    : "syntax error, unexpected $unexp";
-  }eg
-' expout || exit 77
 ./calc.at:1492: cat stderr
-568. calc.at:1492:  ok
-
-625. regression.at:25: testing Trivial grammars ...
-./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./regression.at:44: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-./existing.at:1460: sed -n 's/^State //p' input.output | tail -1
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
-./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-stderr:
-stdout:
-./regression.at:45: $CC $CFLAGS $CPPFLAGS  -c -o input.o -DYYDEBUG -c input.c 
-stderr:
-input.y:202.20: error: empty rule without %empty [-Werror=empty-rule]
-input.y:270.7: error: empty rule without %empty [-Werror=empty-rule]
-input.y:292.13: error: empty rule without %empty [-Werror=empty-rule]
-input.y:309.18: error: empty rule without %empty [-Werror=empty-rule]
-input.y:382.14: error: empty rule without %empty [-Werror=empty-rule]
-input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other]
-input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence]
-input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence]
-input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence]
-input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence]
-input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence]
-input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence]
-input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence]
-input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence]
-input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence]
-input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence]
-input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence]
-input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence]
-input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence]
-input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence]
-input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence]
-input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence]
-input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence]
-input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence]
-input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence]
-input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence]
-input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
-input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence]
-input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence]
-input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence]
-input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
-input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
-./existing.at:1460: sed 's,.*/$,,' stderr 1>&2
-stderr:
-input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother]
-stdout:
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
-./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
-./existing.at:1460: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-stderr:
-stdout:
-625. regression.at:25:  ok
-
-stderr:
-stdout:
-./existing.at:1460:  $PREPARSER ./input
-stderr:
-syntax error, unexpected LEFT
-./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-622. existing.at:1460:  ok
-
-626. regression.at:55: testing YYSTYPE typedef ...
-./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-stderr:
-stdout:
-./calc.at:1494: "$PERL" -ne '
-  chomp;
-  print "$ARGV:$.: {$_}\n"
-    if (# No starting/ending empty lines.
-        (eof || $. == 1) && /^\s*$/
-        # No trailing space.
-        || /\s$/
-        # No tabs.
-        || /\t/
-        )' calc.cc calc.hh
-
-627. regression.at:85: testing Early token definitions with --yacc ...
-./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --yacc -o input.c input.y
-./regression.at:74: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-input:
-  | 1 + 2 * 3 = 7
-  | 1 + 2 * -3 = -5
-  | 
-  | -1^2 = -1
-  | (-1)^2 = 1
-  | 
-  | ---1 = -1
-  | 
-  | 1 - 2 - 3 = -4
-  | 1 - (2 - 3) = 2
-  | 
-  | 2^2^3 = 256
-  | (2^2)^3 = 64
-./calc.at:1494:  $PREPARSER ./calc  input
-stderr:
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 Starting parse
 Entering state 0
 Reading a token
@@ -244507,8 +243526,15 @@
 Entering state 16
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
+stderr:
+stdout:
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20
+input:
+stderr:
 stderr:
+  | (# + 1) = 1111
+./calc.at:1492:  $PREPARSER ./calc  input
 Starting parse
 Entering state 0
 Reading a token
@@ -245345,11 +244371,84 @@
 Entering state 16
 Cleanup: popping token end of input (14.1: )
 Cleanup: popping nterm input (1.1-14.0: )
+./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
+stderr:
   | 1 2
 ./calc.at:1494:  $PREPARSER ./calc  input
-./regression.at:116: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
+stderr:
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900
 Starting parse
 Entering state 0
 Reading a token
@@ -245365,7 +244464,80 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token number (1.3: 2)
+stderr:
+stderr:
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+1.2: syntax error: invalid character: '#'
+Next token is token error (1.2: )
+Shifting token error (1.2: )
+Entering state 11
+Next token is token error (1.2: )
+Error: discarding token error (1.2: )
+Reading a token
+Next token is token '+' (1.4: )
+Error: discarding token '+' (1.4: )
+Reading a token
+Next token is token number (1.6: 1)
+Error: discarding token number (1.6: 1)
+Reading a token
+Next token is token ')' (1.7: )
+Entering state 11
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-6: )
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.9: )
+Shifting token '=' (1.9: )
+Entering state 18
+Reading a token
+Next token is token number (1.11-14: 1111)
+Shifting token number (1.11-14: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.11-14: 1111)
+-> $$ = nterm exp (1.11-14: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.15-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-7: 1111)
+   $2 = token '=' (1.9: )
+   $3 = nterm exp (1.11-14: 1111)
+-> $$ = nterm exp (1.1-14: 1111)
+Entering state 8
+Next token is token '\n' (1.15-2.0: )
+Shifting token '\n' (1.15-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-14: 1111)
+   $2 = token '\n' (1.15-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
+stderr:
 stderr:
 Starting parse
 Entering state 0
@@ -245382,6 +244554,22 @@
 1.3: syntax error, unexpected number
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token number (1.3: 2)
+./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000
+stderr:
+memory exhausted
+memory exhausted
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./calc.at:1492: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -245392,10 +244580,104 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+stderr:
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+memory exhausted
+memory exhausted
+./calc.at:1492: cat stderr
+615. torture.at:531:  ok
 ./calc.at:1494: cat stderr
 input:
+
+  | (1 + # + 1) = 1111
+./calc.at:1492:  $PREPARSER ./calc  input
+stderr:
+input:
   | 1//2
 ./calc.at:1494:  $PREPARSER ./calc  input
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 stderr:
 Starting parse
 Entering state 0
@@ -245417,8 +244699,93 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+1.6: syntax error: invalid character: '#'
+Next token is token error (1.6: )
+Error: popping token '+' (1.4: )
+Error: popping nterm exp (1.2: 1)
+Shifting token error (1.2-6: )
+Entering state 11
+Next token is token error (1.6: )
+Error: discarding token error (1.6: )
+Reading a token
+Next token is token '+' (1.8: )
+Error: discarding token '+' (1.8: )
+Reading a token
+Next token is token number (1.10: 1)
+Error: discarding token number (1.10: 1)
+Reading a token
+Next token is token ')' (1.11: )
+Entering state 11
+Next token is token ')' (1.11: )
+Shifting token ')' (1.11: )
+Entering state 25
+Reducing stack 0 by rule 14 (line 105):
+   $1 = token '(' (1.1: )
+   $2 = token error (1.2-10: )
+   $3 = token ')' (1.11: )
+-> $$ = nterm exp (1.1-11: 1111)
+Entering state 8
+Reading a token
+Next token is token '=' (1.13: )
+Shifting token '=' (1.13: )
+Entering state 18
+Reading a token
+Next token is token number (1.15-18: 1111)
+Shifting token number (1.15-18: 1111)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.15-18: 1111)
+-> $$ = nterm exp (1.15-18: 1111)
+Entering state 27
+Reading a token
+Next token is token '\n' (1.19-2.0: )
+Reducing stack 0 by rule 6 (line 80):
+   $1 = nterm exp (1.1-11: 1111)
+   $2 = token '=' (1.13: )
+   $3 = nterm exp (1.15-18: 1111)
+-> $$ = nterm exp (1.1-18: 1111)
+Entering state 8
+Next token is token '\n' (1.19-2.0: )
+Shifting token '\n' (1.19-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-18: 1111)
+   $2 = token '\n' (1.19-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 Starting parse
 Entering state 0
 Reading a token
@@ -245439,6 +244806,16 @@
 Error: popping token '/' (1.2: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '/' (1.3: )
+./calc.at:1492: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -245449,21 +244826,261 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./calc.at:1492: cat stderr
 ./calc.at:1494: cat stderr
 input:
+  | (1 + 1) / (1 - 1)
+./calc.at:1492:  $PREPARSER ./calc  input
+input:
+620. existing.at:808: testing GNU Cim Grammar: IELR(1) ...
+./existing.at:74: sed -n 's/^State //p' input.output | tail -1
 stderr:
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
   | error
 ./calc.at:1494:  $PREPARSER ./calc  input
-stdout:
+./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
 stderr:
-627. regression.at:85:  ok
 Starting parse
 Entering state 0
 Reading a token
 Next token is token invalid token (1.1: )
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
+Starting parse
+Entering state 0
+Reading a token
+Next token is token '(' (1.1: )
+Shifting token '(' (1.1: )
+Entering state 4
+Reading a token
+Next token is token number (1.2: 1)
+Shifting token number (1.2: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.2: 1)
+-> $$ = nterm exp (1.2: 1)
+Entering state 12
+Reading a token
+Next token is token '+' (1.4: )
+Shifting token '+' (1.4: )
+Entering state 20
+Reading a token
+Next token is token number (1.6: 1)
+Shifting token number (1.6: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.6: 1)
+-> $$ = nterm exp (1.6: 1)
+Entering state 29
+Reading a token
+Next token is token ')' (1.7: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = nterm exp (1.2: 1)
+   $2 = token '+' (1.4: )
+   $3 = nterm exp (1.6: 1)
+-> $$ = nterm exp (1.2-6: 2)
+Entering state 12
+Next token is token ')' (1.7: )
+Shifting token ')' (1.7: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.1: )
+   $2 = nterm exp (1.2-6: 2)
+   $3 = token ')' (1.7: )
+-> $$ = nterm exp (1.1-7: 2)
+Entering state 8
+Reading a token
+Next token is token '/' (1.9: )
+Shifting token '/' (1.9: )
+Entering state 22
+Reading a token
+Next token is token '(' (1.11: )
+Shifting token '(' (1.11: )
+Entering state 4
+Reading a token
+Next token is token number (1.12: 1)
+Shifting token number (1.12: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.12: 1)
+-> $$ = nterm exp (1.12: 1)
+Entering state 12
+Reading a token
+Next token is token '-' (1.14: )
+Shifting token '-' (1.14: )
+Entering state 19
+Reading a token
+Next token is token number (1.16: 1)
+Shifting token number (1.16: 1)
+Entering state 1
+Reducing stack 0 by rule 5 (line 79):
+   $1 = token number (1.16: 1)
+-> $$ = nterm exp (1.16: 1)
+Entering state 28
+Reading a token
+Next token is token ')' (1.17: )
+Reducing stack 0 by rule 8 (line 91):
+   $1 = nterm exp (1.12: 1)
+   $2 = token '-' (1.14: )
+   $3 = nterm exp (1.16: 1)
+-> $$ = nterm exp (1.12-16: 0)
+Entering state 12
+Next token is token ')' (1.17: )
+Shifting token ')' (1.17: )
+Entering state 26
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token '(' (1.11: )
+   $2 = nterm exp (1.12-16: 0)
+   $3 = token ')' (1.17: )
+-> $$ = nterm exp (1.11-17: 0)
+Entering state 31
+Reading a token
+Next token is token '\n' (1.18-2.0: )
+Reducing stack 0 by rule 10 (line 93):
+   $1 = nterm exp (1.1-7: 2)
+   $2 = token '/' (1.9: )
+   $3 = nterm exp (1.11-17: 0)
+1.11-17: error: null divisor
+-> $$ = nterm exp (1.1-17: 2)
+Entering state 8
+Next token is token '\n' (1.18-2.0: )
+Shifting token '\n' (1.18-2.0: )
+Entering state 24
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm exp (1.1-17: 2)
+   $2 = token '\n' (1.18-2.0: )
+-> $$ = nterm line (1.1-2.0: )
+Entering state 7
+Reducing stack 0 by rule 1 (line 69):
+   $1 = nterm line (1.1-2.0: )
+-> $$ = nterm input (1.1-2.0: )
+Entering state 6
+Reading a token
+Now at end of input.
+Shifting token end of input (2.1: )
+Entering state 16
+Cleanup: popping token end of input (2.1: )
+Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
 stderr:
 Starting parse
 Entering state 0
@@ -245471,7 +245088,51 @@
 Next token is token invalid token (1.1: )
 1.1: syntax error, unexpected invalid token
 Cleanup: discarding lookahead token invalid token (1.1: )
-
+./calc.at:1492: "$PERL" -pi -e 'use strict;
+  s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
+  {
+    my $unexp = $1;
+    my @exps = $2 =~ /\[(.*?)\]/g;
+    ($#exps && $#exps < 4)
+    ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
+    : "syntax error, unexpected $unexp";
+  }eg
+' expout || exit 77
+stderr:
+./calc.at:1492: cat stderr
+input.y:128.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:137.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:142.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:161.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:179.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:205.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:213.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:225.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:292.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:294.20: error: empty rule without %empty [-Werror=empty-rule]
+input.y:367.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:373.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:387.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:401.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:413.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:443.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:471.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:474.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:489.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:506.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:587.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:591.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr]
+input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence]
+input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence]
+input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence]
+input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence]
+input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence]
+input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence]
+input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
+./existing.at:808: sed 's,.*/$,,' stderr 1>&2
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -245482,11 +245143,15 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+568. calc.at:1492:  ok
 ./calc.at:1494: cat stderr
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
+./existing.at:74: sed -n 's/^State //p' input.output | tail -1
+
 input:
   | 1 = 2 = 3
 ./calc.at:1494:  $PREPARSER ./calc  input
+./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
 stderr:
 Starting parse
 Entering state 0
@@ -245517,9 +245182,9 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-628. regression.at:127: testing Early token definitions without --yacc ...
 Starting parse
 Entering state 0
 Reading a token
@@ -245549,7 +245214,10 @@
 Error: popping token '=' (1.3: )
 Error: popping nterm exp (1.1: 1)
 Cleanup: discarding lookahead token '=' (1.7: )
-./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+stderr:
+input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr]
+input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+stdout:
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -245560,9 +245228,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
 ./calc.at:1494: cat stderr
-./regression.at:162: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
+./existing.at:74: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 input:
+621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ...
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
   | 
   | +1
 ./calc.at:1494:  $PREPARSER ./calc  input
@@ -245587,7 +245258,6 @@
 Error: popping nterm input (1.1-2.0: )
 Cleanup: discarding lookahead token '+' (2.1: )
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
 stderr:
 Starting parse
 Entering state 0
@@ -245608,9 +245278,7 @@
 2.1: syntax error, unexpected '+'
 Error: popping nterm input (1.1-2.0: )
 Cleanup: discarding lookahead token '+' (2.1: )
-stderr:
-stdout:
-626. regression.at:55:  ok
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -245622,8 +245290,12 @@
   }eg
 ' expout || exit 77
 ./calc.at:1494: cat stderr
-
+stderr:
+input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr]
+input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+stdout:
 ./calc.at:1494:  $PREPARSER ./calc  /dev/null
+./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
 stderr:
 Starting parse
 Entering state 0
@@ -245631,9 +245303,10 @@
 Now at end of input.
 1.1: syntax error, unexpected end of input
 Cleanup: discarding lookahead token end of input (1.1: )
-stderr:
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./existing.at:74: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
 Starting parse
 Entering state 0
 Reading a token
@@ -245650,13 +245323,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stdout:
-628. regression.at:127:  ok
 ./calc.at:1494: cat stderr
 input:
   | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1
 ./calc.at:1494:  $PREPARSER ./calc  input
-
 stderr:
 Starting parse
 Entering state 0
@@ -246165,10 +245835,17 @@
   }eg
 ' expout || exit 77
 ./calc.at:1494: cat stderr
+stderr:
+stdout:
+./existing.at:74:  $PREPARSER ./input
+stderr:
+syntax error, unexpected '*', expecting NEWLINE or '{' or ';'
+./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 input:
   | (!!) + (1 2) = 1
 ./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
+616. existing.at:74:  ok
 Starting parse
 Entering state 0
 Reading a token
@@ -246393,6 +246070,7 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -246404,9 +246082,44 @@
   }eg
 ' expout || exit 77
 ./calc.at:1494: cat stderr
+stderr:
+input.y:128.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:137.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:142.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:161.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:179.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:205.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:213.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:225.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:292.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:294.20: error: empty rule without %empty [-Werror=empty-rule]
+input.y:367.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:373.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:387.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:401.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:413.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:443.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:471.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:474.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:489.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:506.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:587.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:591.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr]
+input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence]
+input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence]
+input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence]
+input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence]
+input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence]
+input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence]
+input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
+./existing.at:808: sed 's,.*/$,,' stderr 1>&2
 input:
   | (- *) + (1 2) = 1
 ./calc.at:1494:  $PREPARSER ./calc  input
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
 stderr:
 Starting parse
 Entering state 0
@@ -246525,9 +246238,10 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-629. regression.at:173: testing Braces parsing ...
-./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
 stderr:
+./torture.at:394: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ...
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 Starting parse
 Entering state 0
 Reading a token
@@ -246644,8 +246358,6 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-630. regression.at:196: testing Rule Line Numbers ...
-./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c -v input.y
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -246656,15 +246368,12 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c
 ./calc.at:1494: cat stderr
-stdout:
-     { tests = {{{{{{{{{{}}}}}}}}}}; }
 input:
-629. regression.at:173:  ok
   | (* *) + (*) + (*)
 ./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Reading a token
@@ -246780,10 +246489,45 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-
+input.y:66.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:170.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:175.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:180.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:188.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:202.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:207.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:221.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:300.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y:323.10: error: empty rule without %empty [-Werror=empty-rule]
+input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr]
+input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence]
+input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence]
+input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence]
+input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence]
+input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence]
+input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence]
+input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence]
+input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence]
+input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence]
+input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence]
+input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence]
+input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence]
+input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence]
+input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
+input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence]
+input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence]
+input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
+input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence]
+input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./existing.at:74: sed 's,.*/$,,' stderr 1>&2
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+stderr:
+stderr:
+stdout:
+./existing.at:74:  $PREPARSER ./input
 stderr:
-./regression.at:235: cat input.output
 Starting parse
 Entering state 0
 Reading a token
@@ -246899,7 +246643,9 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-630. regression.at:196:  ok
+./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+617. existing.at:74:  ok
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -246911,12 +246657,10 @@
   }eg
 ' expout || exit 77
 ./calc.at:1494: cat stderr
-
 input:
   | 1 + 2 * 3 + !+ ++
+
 ./calc.at:1494:  $PREPARSER ./calc  input
-631. regression.at:345: testing Mixing %token styles ...
-./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -Wall -o input.c input.y
 stderr:
 Starting parse
 Entering state 0
@@ -246983,7 +246727,6 @@
    $2 = token '+' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
-./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -247054,6 +246797,7 @@
 input:
   | 1 + 2 * 3 + !- ++
 ./calc.at:1494:  $PREPARSER ./calc  input
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
 stderr:
 Starting parse
 Entering state 0
@@ -247187,6 +246931,8 @@
    $2 = token '-' (1.14: )
 Cleanup: popping token '+' (1.11: )
 Cleanup: popping nterm exp (1.1-9: 7)
+623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ...
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -247197,14 +246943,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-stderr:
 ./calc.at:1494: cat stderr
-input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence]
-input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence]
 input:
   | (#) + (#) = 2222
 ./calc.at:1494:  $PREPARSER ./calc  input
-./regression.at:357: sed 's,.*/$,,' stderr 1>&2
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 stderr:
 Starting parse
 Entering state 0
@@ -247303,7 +247046,6 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -247495,6 +247237,7 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./existing.at:808: sed -n 's/^State //p' input.output | tail -1
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
@@ -247574,6 +247317,8 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -247584,12 +247329,11 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
-632. regression.at:437: testing Token definitions: parse.error=detailed ...
-./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o input.c input.y
 ./calc.at:1494: cat stderr
 input:
   | (# + 1) = 1111
 ./calc.at:1494:  $PREPARSER ./calc  input
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
 stderr:
 Starting parse
 Entering state 0
@@ -247731,6 +247475,95 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
+stderr:
+input.y:202.20: error: empty rule without %empty [-Werror=empty-rule]
+input.y:270.7: error: empty rule without %empty [-Werror=empty-rule]
+input.y:292.13: error: empty rule without %empty [-Werror=empty-rule]
+input.y:309.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:382.14: error: empty rule without %empty [-Werror=empty-rule]
+input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other]
+input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence]
+input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence]
+input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence]
+input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence]
+input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence]
+input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence]
+input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence]
+input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence]
+input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence]
+input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence]
+input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence]
+input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence]
+input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence]
+input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence]
+input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
+input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence]
+input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence]
+input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
+input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -247741,9 +247574,10 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+./existing.at:1460: sed 's,.*/$,,' stderr 1>&2
 ./calc.at:1494: cat stderr
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
 input:
-./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none
   | (1 + # + 1) = 1111
 ./calc.at:1494:  $PREPARSER ./calc  input
 stderr:
@@ -247831,6 +247665,7 @@
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
 ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 stderr:
 Starting parse
 Entering state 0
@@ -247915,7 +247750,6 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -247930,7 +247764,6 @@
 input:
   | (1 + 1) / (1 - 1)
 ./calc.at:1494:  $PREPARSER ./calc  input
-./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Werror
 stderr:
 Starting parse
 Entering state 0
@@ -248164,7 +247997,7 @@
 Entering state 16
 Cleanup: popping token end of input (2.1: )
 Cleanup: popping nterm input (1.1-2.0: )
-631. regression.at:345:  ok
+stderr:
 ./calc.at:1494: "$PERL" -pi -e 'use strict;
   s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
   {
@@ -248175,10 +248008,231 @@
     : "syntax error, unexpected $unexp";
   }eg
 ' expout || exit 77
+input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr]
+input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr]
+input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+stdout:
+./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
 ./calc.at:1494: cat stderr
+./existing.at:808: grep '^State.*conflicts:' input.output
+./existing.at:808: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+570. calc.at:1494:  ok
+./existing.at:808: sed -n 's/^State //p' input.output | tail -1
+
+./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
+624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ...
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+stderr:
+input.y:202.20: error: empty rule without %empty [-Werror=empty-rule]
+input.y:270.7: error: empty rule without %empty [-Werror=empty-rule]
+input.y:292.13: error: empty rule without %empty [-Werror=empty-rule]
+input.y:309.18: error: empty rule without %empty [-Werror=empty-rule]
+input.y:382.14: error: empty rule without %empty [-Werror=empty-rule]
+input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other]
+input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence]
+input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence]
+input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence]
+input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence]
+input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence]
+input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence]
+input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence]
+input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence]
+input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence]
+input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence]
+input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence]
+input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence]
+input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence]
+input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence]
+input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence]
+input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence]
+input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence]
+input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence]
+input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence]
+input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence]
+input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence]
+input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence]
+input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence]
+input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence]
+input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
+input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
+./existing.at:1460: sed 's,.*/$,,' stderr 1>&2
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
+stderr:
+stdout:
+./existing.at:808:  $PREPARSER ./input
+stderr:
+./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+619. existing.at:808:  ok
+stderr:
+input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr]
+input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr]
+input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+stdout:
+./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
+./existing.at:808: grep '^State.*conflicts:' input.output
+
+./existing.at:808: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+625. regression.at:25: testing Trivial grammars ...
+./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./regression.at:44: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+stderr:
+stdout:
+./existing.at:808:  $PREPARSER ./input
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+stderr:
+./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+620. existing.at:808:  ok
+
+stderr:
+626. regression.at:55: testing YYSTYPE typedef ...
+./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+stdout:
+./regression.at:45: $CC $CFLAGS $CPPFLAGS  -c -o input.o -DYYDEBUG -c input.c 
+./regression.at:74: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
+./existing.at:1460: sed -n 's/^State //p' input.output | tail -1
+./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+stderr:
+stdout:
+626. regression.at:55:  ok
+
+627. regression.at:85: testing Early token definitions with --yacc ...
+./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --yacc -o input.c input.y
+./regression.at:116: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
+stderr:
+input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother]
+stdout:
+./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
+./existing.at:1460: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./existing.at:1460: sed -n 's/^State //p' input.output | tail -1
+./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
+stderr:
+stdout:
+627. regression.at:85:  ok
+
+stderr:
+stdout:
+625. regression.at:25:  ok
+
+628. regression.at:127: testing Early token definitions without --yacc ...
+./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+stderr:
+stdout:
+./existing.at:1460:  $PREPARSER ./input
+629. regression.at:173: testing Braces parsing ...
+./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
+stderr:
+syntax error, unexpected LEFT
+./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./regression.at:162: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
+622. existing.at:1460:  ok
+./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c
+stdout:
+     { tests = {{{{{{{{{{}}}}}}}}}}; }
+629. regression.at:173:  ok
+
+
+stderr:
+input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother]
+stdout:
+./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
+./existing.at:1460: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+631. regression.at:345: testing Mixing %token styles ...
+./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -Wall -o input.c input.y
+630. regression.at:196: testing Rule Line Numbers ...
+stderr:
+./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c -v input.y
+stdout:
+628. regression.at:127:  ok
+./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror
+
+./existing.at:74: sed -n 's/^State //p' input.output | tail -1
+./regression.at:235: cat input.output
+630. regression.at:196:  ok
 stderr:
+input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence]
+input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence]
+./regression.at:357: sed 's,.*/$,,' stderr 1>&2
 
+./existing.at:74: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error
+632. regression.at:437: testing Token definitions: parse.error=detailed ...
+./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o input.c input.y
+633. regression.at:438: testing Token definitions: parse.error=verbose ...
+./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o input.c input.y
+./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Werror
+./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none
+stderr:
+stdout:
+./existing.at:1460:  $PREPARSER ./input
+./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Werror
+stderr:
+./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+623. existing.at:1460:  ok
+stderr:
 input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other]
    26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!"
       |        ^~~~~~~
@@ -248188,21 +248242,10 @@
 input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other]
    26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!"
       |                ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-570. calc.at:1494:  ok
-./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y
+./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none
 ./regression.at:437: sed 's,.*/$,,' stderr 1>&2
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all,no-cex input-lalr.y
 ./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error
 
-./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none
-633. regression.at:438: testing Token definitions: parse.error=verbose ...
-./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o input.c input.y
-634. regression.at:447: testing Characters Escapes ...
-./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none
-./regression.at:466: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
-./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Werror
-./regression.at:437: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other]
    26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!"
@@ -248213,27 +248256,34 @@
 input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other]
    26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!"
       |                ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+631. regression.at:345:  ok
 ./regression.at:438: sed 's,.*/$,,' stderr 1>&2
+./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none
+
 ./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error
-stderr:
-input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother]
-stdout:
-./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//'
-./existing.at:1460: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+634. regression.at:447: testing Characters Escapes ...
+./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none
 ./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none
 ./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none
-stderr:
-stdout:
-634. regression.at:447:  ok
-
-./regression.at:438: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 635. regression.at:480: testing Web2c Report ...
+./regression.at:466: $CC $CFLAGS $CPPFLAGS  -c -o input.o input.c 
 ./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v input.y
+./regression.at:438: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./regression.at:437: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./regression.at:506: cat input.output
 635. regression.at:480:  ok
 
+stderr:
+stdout:
+634. regression.at:447:  ok
 636. regression.at:661: testing Web2c Actions ...
 ./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -v -o input.c input.y
+
+./regression.at:679: cat tables.c
+636. regression.at:661:  ok
+637. regression.at:812: testing Useless Tokens ...
+./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -o input.c input.y
 stderr:
 stdout:
 ./regression.at:437:  $PREPARSER ./input
@@ -248241,44 +248291,42 @@
 syntax error, unexpected a, expecting ∃¬∩∪∀
 ./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 632. regression.at:437:  ok
-./regression.at:679: cat tables.c
-636. regression.at:661:  ok
 
 
-./existing.at:74: sed -n 's/^State //p' input.output | tail -1
-637. regression.at:812: testing Useless Tokens ...
-./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-other -o input.c input.y
-./existing.at:74: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-638. regression.at:1143: testing Dancer  ...
-./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o dancer.c dancer.y
 stderr:
+639. regression.at:1144: testing Dancer %glr-parser ...
+./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o dancer.c dancer.y
+638. regression.at:1143: testing Dancer  ...
 stdout:
+./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o dancer.c dancer.y
 ./regression.at:438:  $PREPARSER ./input
-./regression.at:1143: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o dancer dancer.c $LIBS
 stderr:
 syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!"
 ./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 633. regression.at:438:  ok
+./regression.at:1144: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o dancer dancer.c $LIBS
+./regression.at:917: cat tables.c
+./regression.at:1143: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o dancer dancer.c $LIBS
 
 stderr:
+637. regression.at:812:  ok
 stdout:
-639. regression.at:1144: testing Dancer %glr-parser ...
-./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o dancer.c dancer.y
-./existing.at:1460:  $PREPARSER ./input
+./existing.at:74:  $PREPARSER ./input
 stderr:
-./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-623. existing.at:1460:  ok
-./regression.at:917: cat tables.c
+./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+618. existing.at:74:  ok
 
-637. regression.at:812:  ok
 
-./regression.at:1144: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o dancer dancer.c $LIBS
 640. regression.at:1145: testing Dancer lalr1.cc ...
 ./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o dancer.cc dancer.y
 641. regression.at:1220: testing Expecting two tokens  ...
 ./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o expect2.c expect2.y
-./regression.at:1145: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS
+642. regression.at:1221: testing Expecting two tokens %glr-parser ...
+./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o expect2.c expect2.y
 ./regression.at:1220: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o expect2 expect2.c $LIBS
+./regression.at:1145: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS
+./regression.at:1221: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o expect2 expect2.c $LIBS
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 stderr:
 stdout:
 ./regression.at:1143:  $PREPARSER ./dancer
@@ -248287,9 +248335,9 @@
 ./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 638. regression.at:1143:  ok
 
-642. regression.at:1221: testing Expecting two tokens %glr-parser ...
-./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o expect2.c expect2.y
-./regression.at:1221: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o expect2 expect2.c $LIBS
+643. regression.at:1222: testing Expecting two tokens lalr1.cc ...
+./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o expect2.cc expect2.y
+./regression.at:1222: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS
 stderr:
 stdout:
 ./regression.at:1220:  $PREPARSER ./expect2
@@ -248298,17 +248346,6 @@
 ./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 641. regression.at:1220:  ok
 
-643. regression.at:1222: testing Expecting two tokens lalr1.cc ...
-./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o expect2.cc expect2.y
-./regression.at:1222: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS
-stderr:
-stdout:
-./existing.at:74:  $PREPARSER ./input
-stderr:
-./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-618. existing.at:74:  ok
-
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 644. regression.at:1230: testing Braced code in declaration in rules section ...
 ./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./regression.at:1262: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
@@ -248322,13 +248359,26 @@
 
 645. regression.at:1291: testing String alias declared after use ...
 ./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+645. regression.at:1291:  ok
+
 stderr:
 stdout:
+./regression.at:1221:  $PREPARSER ./expect2
+stderr:
+syntax error, unexpected '+', expecting A or B
+./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+642. regression.at:1221:  ok
+646. regression.at:1314: testing Extra lookahead sets in report ...
+./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all input.y
+
+./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output
+646. regression.at:1314:  ok
+647. regression.at:1355: testing Token number in precedence declaration ...
 stderr:
-./regression.at:1263:  $PREPARSER ./input --debug
 stdout:
+./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall -o input.c input.y
+./regression.at:1263:  $PREPARSER ./input --debug
 stderr:
-./regression.at:1221:  $PREPARSER ./expect2
 Starting parse
 Entering state 0
 Stack now 0
@@ -248344,36 +248394,24 @@
 Cleanup: discarding lookahead token 'a' (PRINTER)
 DESTRUCTOR
 Stack now 0
-stderr:
+
 ./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-syntax error, unexpected '+', expecting A or B
-./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-645. regression.at:1291:  ok
 644. regression.at:1230:  ok
-642. regression.at:1221:  ok
-
-
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 
+./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y -Werror
+649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ...
+./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 648. regression.at:1408: testing parse-gram.y: LALR = IELR ...
 ./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y
-646. regression.at:1314: testing Extra lookahead sets in report ...
-./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret --report=all input.y
-647. regression.at:1355: testing Token number in precedence declaration ...
-./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wall -o input.c input.y
-./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output
-646. regression.at:1314:  ok
-
-./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y -Werror
 stderr:
 input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other]
 input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other]
 input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence]
 ./regression.at:1388: sed 's,.*/$,,' stderr 1>&2
+./regression.at:1482: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error
-649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ...
-./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 ./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none
-./regression.at:1482: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none
 ./regression.at:1393: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y
@@ -248384,47 +248422,51 @@
 syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B
 syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B
 ./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-649. regression.at:1430:  ok
-./regression.at:1420: diff lalr.c ielr.c
-648. regression.at:1408:  ok
-
-
-stderr:
-stdout:
-./regression.at:1394:  $PREPARSER ./input
-stderr:
 stderr:
 stdout:
 ./regression.at:1145:  $PREPARSER ./dancer
-./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-647. regression.at:1355:  ok
-650. regression.at:1504: testing parse.error=verbose overflow ...
-./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+649. regression.at:1430:  ok
 syntax error, unexpected ':'
 ./regression.at:1145: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
 640. regression.at:1145:  ok
+
+650. regression.at:1504: testing parse.error=verbose overflow ...
+./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+stderr:
+stdout:
+./regression.at:1222:  $PREPARSER ./expect2
+stderr:
+syntax error, unexpected '+', expecting A or B
+./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./regression.at:1611: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+643. regression.at:1222:  ok
 651. regression.at:1628: testing LAC: Exploratory stack ...
 ./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full \
                  -Dparse.lac.es-capacity-initial=1 \
                  -Dparse.lac.memory-trace=full -o input.c input.y
+stderr:
+stdout:
+./regression.at:1394:  $PREPARSER ./input
+
+stderr:
+./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+647. regression.at:1355:  ok
 
+./regression.at:1420: diff lalr.c ielr.c
+648. regression.at:1408:  ok
 
+653. regression.at:1874: testing Lex and parse params: yacc.c ...
+./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./regression.at:1713: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./regression.at:1611: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 652. regression.at:1739: testing LAC: Memory exhaustion ...
 ./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y
-653. regression.at:1874: testing Lex and parse params: yacc.c ...
-./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-./regression.at:1771: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./regression.at:1874: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-stdout:
-./regression.at:1874:  $PREPARSER ./input
-stderr:
-./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-653. regression.at:1874:  ok
-
+./regression.at:1771: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+654. regression.at:1875: testing Lex and parse params: glr.c ...
+./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+./regression.at:1875: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 input.y: In function 'yyparse':
 input.y:59:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=]
@@ -248448,46 +248490,19 @@
 ./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 650. regression.at:1504:  ok
 
-654. regression.at:1875: testing Lex and parse params: glr.c ...
-./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
-655. regression.at:1876: testing Lex and parse params: lalr1.cc ...
-./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./regression.at:1875: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./regression.at:1876: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./regression.at:1222:  $PREPARSER ./expect2
+./regression.at:1874:  $PREPARSER ./input
+655. regression.at:1876: testing Lex and parse params: lalr1.cc ...
+./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 stderr:
-syntax error, unexpected '+', expecting A or B
-./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-643. regression.at:1222:  ok
+./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+653. regression.at:1874:  ok
+./regression.at:1876: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 
 656. regression.at:1877: testing Lex and parse params: glr.cc ...
 ./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./regression.at:1877: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./regression.at:1771: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-stdout:
-./regression.at:1713:  $PREPARSER ./input --debug > stdout.txt 2> stderr.txt
-stderr:
-./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./regression.at:1713: grep 'syntax error,' stderr.txt
-./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt
-./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt
-./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt
-./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full \
-                 -Dparse.lac.es-capacity-initial=1 \
-                 -Dparse.lac.memory-trace=full -o input.c input.y
-./regression.at:1714: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror
 stderr:
-stdout:
-./regression.at:1875:  $PREPARSER ./input
-stderr:
-./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-654. regression.at:1875:  ok
 stderr:
 input.y:202.20: error: empty rule without %empty [-Werror=empty-rule]
 input.y:270.7: error: empty rule without %empty [-Werror=empty-rule]
@@ -248577,10 +248592,36 @@
 input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence]
 input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence]
 input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
+stdout:
+./regression.at:1713:  $PREPARSER ./input --debug > stdout.txt 2> stderr.txt
+./existing.at:1460: sed 's,.*/$,,' stderr 1>&2
+stderr:
+./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./regression.at:1713: grep 'syntax error,' stderr.txt
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
+./regression.at:1877: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt
+stdout:
+./regression.at:1771: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt
+./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt
+./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full \
+                 -Dparse.lac.es-capacity-initial=1 \
+                 -Dparse.lac.memory-trace=full -o input.c input.y
+./regression.at:1714: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
+stdout:
+./regression.at:1875:  $PREPARSER ./input
 stderr:
+./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+654. regression.at:1875:  ok
 
+657. regression.at:1878: testing Lex and parse params: glr2.cc ...
+./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+stderr:
+./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stdout:
-./existing.at:1460: sed 's,.*/$,,' stderr 1>&2
 ./regression.at:1772:  $PREPARSER ./input --debug
 stderr:
 Starting parse
@@ -248594,8 +248635,8 @@
 Cleanup: discarding lookahead token "end of file" ()
 Stack now 0
 ./regression.at:1772: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
 ./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y
+./regression.at:1787: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./regression.at:1714:  $PREPARSER ./input --debug > stdout.txt 2> stderr.txt
@@ -248603,60 +248644,45 @@
 ./regression.at:1714: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./regression.at:1714: grep 'syntax error,' stderr.txt
 ./regression.at:1714: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt
-./regression.at:1787: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./regression.at:1714: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt
 ./regression.at:1714: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt
-657. regression.at:1878: testing Lex and parse params: glr2.cc ...
-./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 ./regression.at:1715: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full \
                  -Dparse.lac.es-capacity-initial=1 \
                  -Dparse.lac.memory-trace=full -o input.c input.y
-./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./regression.at:1715: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./regression.at:1787: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-./regression.at:1715:  $PREPARSER ./input --debug > stdout.txt 2> stderr.txt
-stderr:
-./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./regression.at:1715: grep 'syntax error,' stderr.txt
-./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt
-./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt
-./regression.at:1715: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt
-./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full \
-                 -Dparse.lac.es-capacity-initial=1 \
-                 -Dparse.lac.memory-trace=full -o input.c input.y
-./regression.at:1716: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-stdout:
 ./regression.at:1876:  $PREPARSER ./input
 stderr:
 ./regression.at:1876: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
 655. regression.at:1876:  ok
+./regression.at:1877:  $PREPARSER ./input
+stderr:
+./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+
+656. regression.at:1877:  ok
 
 658. regression.at:1889: testing stdio.h is not needed ...
 ./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./regression.at:1906: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-stdout:
-658. regression.at:1889:  ok
-
 659. push.at:25: testing Memory Leak for Early Deletion ...
 ./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
-./push.at:75: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stdout:
-./regression.at:1877:  $PREPARSER ./input
+./regression.at:1715:  $PREPARSER ./input --debug > stdout.txt 2> stderr.txt
+./push.at:75: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
-./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-656. regression.at:1877:  ok
-
-660. push.at:84: testing Multiple impure instances ...
-./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
-./push.at:134: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./regression.at:1715: grep 'syntax error,' stderr.txt
+./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt
+./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt
 stderr:
+./regression.at:1715: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt
 stdout:
 ./regression.at:1788:  $PREPARSER ./input --debug
 stderr:
@@ -248675,62 +248701,81 @@
 Stack now 0
 ./regression.at:1788: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 652. regression.at:1739:  ok
+./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full \
+                 -Dparse.lac.es-capacity-initial=1 \
+                 -Dparse.lac.memory-trace=full -o input.c input.y
+stderr:
+stdout:
 
-661. push.at:145: testing Unsupported Skeletons ...
-./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
+658. regression.at:1889:  ok
+
+./regression.at:1716: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+660. push.at:84: testing Multiple impure instances ...
+./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
 stderr:
 stdout:
 ./push.at:76:  $PREPARSER ./input
-661. push.at:145:  ok
 stderr:
 ./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 659. push.at:25:  ok
 
-
+./push.at:134: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+661. push.at:145: testing Unsupported Skeletons ...
+./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret input.y
 662. push.at:167: testing Pstate reuse ...
-663. c++.at:26: testing C++ Locations Unit Tests ...
 ./push.at:276: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+661. push.at:145:  ok
+
+stderr:
+stdout:
+./torture.at:395:  $PREPARSER ./input
+./push.at:276: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+stderr:
+./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+613. torture.at:385:  ok
+663. c++.at:26: testing C++ Locations Unit Tests ...
 ======== Testing with C++ standard flags: ''
 ./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+
+./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./push.at:134:  $PREPARSER ./input
-./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
-./push.at:276: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ...
+./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o list.cc list.yy
 ./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.c input.y
+./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+./push.at:135: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+======== Testing with C++ standard flags: ''
+./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
+./push.at:277: ./input
+stderr:
+stdout:
+662. push.at:167:  ok
 ./regression.at:1716:  $PREPARSER ./input --debug > stdout.txt 2> stderr.txt
 stderr:
 ./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./push.at:135: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 ./regression.at:1716: grep 'syntax error,' stderr.txt
+
 ./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt
 ./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt
 ./regression.at:1716: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt
 ./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y
-./regression.at:1719: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+665. c++.at:247: testing Multiple occurrences of $n and api.value.automove ...
+./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.yy
 ./push.at:135:  $PREPARSER ./input
 stderr:
 ./push.at:135: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./regression.at:1719: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 660. push.at:84:  ok
-
-664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ...
-./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o list.cc list.yy
-======== Testing with C++ standard flags: ''
-./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./push.at:277: ./input
-662. push.at:167:  ok
-
-665. c++.at:247: testing Multiple occurrences of $n and api.value.automove ...
-./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret input.yy
 ./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.yy -Werror
+
 stderr:
 input.yy:16.33-34: error: multiple occurrences of $2 with api.value.automove [-Werror=other]
    16 | | "twice" exp       { $$ = $2 + $2; }
@@ -248742,62 +248787,20 @@
    17 | | "thrice" exp[val] { $$ = $2 + $val + $2; }
       |                                        ^~
 ./c++.at:263: sed 's,.*/$,,' stderr 1>&2
-./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.yy --warnings=error
-./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none
-./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none
-665. c++.at:247:  ok
-
 666. c++.at:566: testing Variants lalr1.cc ...
 ======== Testing with C++ standard flags: ''
 ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.yy --warnings=error
+./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none
 ./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
-stderr:
-stdout:
-./torture.at:395:  $PREPARSER ./input
-stderr:
-./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-613. torture.at:385:  ok
+./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none
+665. c++.at:247:  ok
 
 667. c++.at:567: testing Variants lalr1.cc parse.assert ...
 ======== Testing with C++ standard flags: ''
 ./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 ./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
-stdout:
-./c++.at:92:  $PREPARSER ./input
-stderr:
-./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./regression.at:1719:  $PREPARSER ./input --debug > stdout.txt 2> stderr.txt
-stderr:
-stdout:
-./regression.at:1878:  $PREPARSER ./input
-stderr:
-stderr:
-./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./regression.at:1719: grep 'syntax error,' stderr.txt
-657. regression.at:1878:  ok
-./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt
-./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt
-
-./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y
-651. regression.at:1628:  skipped (regression.at:1727)
-668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ...
-======== Testing with C++ standard flags: ''
-./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ...
-======== Testing with C++ standard flags: ''
-./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
 input.y:128.18: error: empty rule without %empty [-Werror=empty-rule]
 input.y:137.18: error: empty rule without %empty [-Werror=empty-rule]
 input.y:142.18: error: empty rule without %empty [-Werror=empty-rule]
@@ -248832,26 +248835,60 @@
 input.y: error: fix-its can be applied.  Rerun with option '--update'. [-Werror=other]
 ./existing.at:808: sed 's,.*/$,,' stderr 1>&2
 ./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error
+stderr:
+stdout:
+./regression.at:1878:  $PREPARSER ./input
+stderr:
+./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+657. regression.at:1878:  ok
+
+668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ...
+======== Testing with C++ standard flags: ''
+./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:92:  $PREPARSER ./input
+stderr:
+./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./regression.at:1719:  $PREPARSER ./input --debug > stdout.txt 2> stderr.txt
+stderr:
+./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./regression.at:1719: grep 'syntax error,' stderr.txt
+./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt
+./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt
+./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y
+651. regression.at:1628:  skipped (regression.at:1727)
+
 ./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ...
 stderr:
+======== Testing with C++ standard flags: ''
+./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stdout:
 ./c++.at:235:  $PREPARSER ./list
 stderr:
 ./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+stderr:
 ./c++.at:92:  $PREPARSER ./input
 stderr:
 ./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
 stdout:
 ./c++.at:566: $here/modern
 stdout:
@@ -248879,29 +248916,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
+======== Testing with C++ standard flags: ''
+./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 ./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
 ./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-./existing.at:1460: sed -n 's/^State //p' input.output | tail -1
-stderr:
-stdout:
-./c++.at:92:  $PREPARSER ./input
-stderr:
-./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./existing.at:1460: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
-stderr:
-stdout:
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:567: $here/modern
@@ -248933,14 +248957,12 @@
 ./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./existing.at:1460: sed -n 's/^State //p' input.output | tail -1
 ./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./existing.at:1460: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-./c++.at:235:  $PREPARSER ./list
-stderr:
-./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:568: $here/modern
@@ -248975,6 +248997,21 @@
 ./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
+./c++.at:92:  $PREPARSER ./input
+stderr:
+./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:235:  $PREPARSER ./list
+stderr:
+./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
 ./c++.at:569: $here/modern
 stdout:
 Modern C++: 201703
@@ -249004,19 +249041,25 @@
 ./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./existing.at:1460:  $PREPARSER ./input
 stderr:
-stderr:
 ./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+624. existing.at:1460:  ok
+
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ...
+======== Testing with C++ standard flags: ''
+./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+stderr:
 stdout:
-624. existing.at:1460: ./c++.at:566: $here/modern
- ok
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:566: $here/modern
 stdout:
 Legac++
 ./c++.at:566:  $PREPARSER ./list
@@ -249043,37 +249086,26 @@
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
 ./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
 ======== Testing with C++ standard flags: ''
 ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ...
-======== Testing with C++ standard flags: ''
-./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:92:  $PREPARSER ./input
-stderr:
-./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:235:  $PREPARSER ./list
-stderr:
-./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:92:  $PREPARSER ./input
 stderr:
+stderr:
+./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 ./c++.at:567: $here/modern
 stdout:
+======== Testing with C++ standard flags: ''
+./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 Legac++
 ./c++.at:567:  $PREPARSER ./list
 stderr:
@@ -249099,26 +249131,9 @@
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
 ./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:92:  $PREPARSER ./input
-stderr:
-./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:568: $here/modern
@@ -249148,41 +249163,20 @@
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
 ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:566: $here/modern
-stdout:
-Legac++
-./c++.at:566:  $PREPARSER ./list
+./c++.at:235:  $PREPARSER ./list
 stderr:
-Destroy: "0"
-Destroy: "0"
-Destroy: 1
-Destroy: "1"
-Destroy: (0)
-Destroy: "2"
-Destroy: "2"
-Destroy: (0, 1)
-Destroy: ""
-Destroy: 3
-Destroy: (0, 1, 2)
-Destroy: "4"
-Destroy: "4"
-Destroy: (0, 1, 2)
-Destroy: (0, 1, 2, 4)
-Destroy: 5
-Destroy: (0, 1, 2, 4)
-Destroy: "6"
-Destroy: "6"
-Destroy: (0, 1, 2, 4)
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:569: $here/modern
@@ -249220,63 +249214,20 @@
 ./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:570: $here/modern
-stdout:
-Modern C++: 201703
-./c++.at:570:  $PREPARSER ./list
-stderr:
-Destroy: "0"
-Destroy: "0"
-Destroy: 1
-Destroy: "1"
-Destroy: (0)
-Destroy: "2"
-Destroy: "2"
-Destroy: (0, 1)
-Destroy: ""
-Destroy: 3
-Destroy: (0, 1, 2)
-Destroy: "4"
-Destroy: "4"
-Destroy: (0, 1, 2)
-Destroy: (0, 1, 2, 4)
-Destroy: 5
-Destroy: (0, 1, 2, 4)
-Destroy: "6"
-Destroy: "6"
-Destroy: (0, 1, 2, 4)
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:235:  $PREPARSER ./list
-stderr:
-./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:92:  $PREPARSER ./input
 stderr:
 ./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ======== Testing with C++ standard flags: ''
 ./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
 stdout:
-./c++.at:567: $here/modern
+./c++.at:570: $here/modern
 stdout:
-Legac++
-./c++.at:567:  $PREPARSER ./list
+Modern C++: 201703
+./c++.at:570:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249299,23 +249250,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
+./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+======== Testing with C++ standard flags: ''
+./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stdout:
-stderr:
+./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:566: $here/modern
 stdout:
-stdout:
-Modern C++: 201103
+Legac++
 ./c++.at:566:  $PREPARSER ./list
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249341,6 +249285,15 @@
 ./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none
 stderr:
 stdout:
 ./c++.at:568: $here/modern
@@ -249372,14 +249325,20 @@
 ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 ./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:569: $here/modern
+./c++.at:235:  $PREPARSER ./list
+stderr:
+./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:567: $here/modern
 stdout:
 Legac++
-./c++.at:569:  $PREPARSER ./list
+./c++.at:567:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249402,20 +249361,13 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:235:  $PREPARSER ./list
-stderr:
-./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:92:  $PREPARSER ./input
@@ -249426,10 +249378,13 @@
 ./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:570: $here/modern
+stderr:
+stdout:
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:569: $here/modern
 stdout:
 Legac++
-./c++.at:570:  $PREPARSER ./list
+./c++.at:569:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249452,25 +249407,19 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:567: $here/modern
+./c++.at:566: $here/modern
 stdout:
 Modern C++: 201103
-./c++.at:567:  $PREPARSER ./list
+./c++.at:566:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249493,16 +249442,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:566: $here/modern
+./c++.at:570: $here/modern
 stdout:
-Modern C++: 201402
-./c++.at:566:  $PREPARSER ./list
+Legac++
+./c++.at:570:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249525,20 +249474,31 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:235:  $PREPARSER ./list
+stderr:
+./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
+stderr:
+stdout:
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 ./c++.at:92:  $PREPARSER ./input
 stderr:
 ./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-663. c++.at:26:  ok
-
+======== Testing with C++ standard flags: ''
+./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:92: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:568: $here/modern
@@ -249570,24 +249530,13 @@
 ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ...
-======== Testing with C++ standard flags: ''
-./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 ./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:235:  $PREPARSER ./list
-stderr:
-./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:569: $here/modern
+./c++.at:567: $here/modern
 stdout:
 Modern C++: 201103
-./c++.at:569:  $PREPARSER ./list
+./c++.at:567:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249610,21 +249559,25 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-./existing.at:808: sed -n 's/^State //p' input.output | tail -1
+./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+stderr:
+stdout:
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-./existing.at:808: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
-./c++.at:570: $here/modern
+./c++.at:569: $here/modern
 stdout:
-Legac++
-./c++.at:570:  $PREPARSER ./list
+Modern C++: 201103
+./c++.at:569:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249647,36 +249600,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./existing.at:808:  $PREPARSER ./input
-stderr:
-./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-621. existing.at:808:  ok
-
-stderr:
-stdout:
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ...
+./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:567: $here/modern
+./c++.at:566: $here/modern
 stdout:
 Modern C++: 201402
-./c++.at:567:  $PREPARSER ./list
+./c++.at:566:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249699,19 +249632,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
+./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
-./c++.at:566: $here/modern
+./c++.at:570: $here/modern
 stdout:
-Modern C++: 201703
-./c++.at:566:  $PREPARSER ./list
-stderr:
+======== Testing with C++ standard flags: ''
+./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+Legac++
+./c++.at:570:  $PREPARSER ./list
 stderr:
-stdout:
 Destroy: "0"
 Destroy: "0"
 Destroy: 1
@@ -249733,11 +249663,13 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./existing.at:808: sed -n 's/^State //p' input.output | tail -1
 ./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./existing.at:808: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
 stderr:
 stdout:
 ./c++.at:235:  $PREPARSER ./list
@@ -249747,6 +249679,20 @@
 ./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
+./c++.at:92:  $PREPARSER ./input
+stderr:
+./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+663. c++.at:26:  ok
+
+671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ...
+======== Testing with C++ standard flags: ''
+./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
 ./c++.at:568: $here/modern
 stdout:
 Modern C++: 201402
@@ -249774,18 +249720,38 @@
 Destroy: ()
 Destroy: (0, 1, 2, 4, 6)
 ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+./existing.at:808:  $PREPARSER ./input
+stderr:
 ======== Testing with C++ standard flags: ''
 ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+621. existing.at:808:  ok
+
 ./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ...
+======== Testing with C++ standard flags: ''
+./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:571: $here/modern
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
 stdout:
-Modern C++: 201703
-./c++.at:571:  $PREPARSER ./list
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:567: $here/modern
+stdout:
+Modern C++: 201402
+./c++.at:567:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249808,13 +249774,10 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:569: $here/modern
@@ -249878,33 +249841,13 @@
 ./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:235:  $PREPARSER ./list
-stderr:
-./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-664. c++.at:107:  ok
-
-673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ...
-======== Testing with C++ standard flags: ''
-./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:572: $here/modern
+./c++.at:566: $here/modern
 stdout:
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 Modern C++: 201703
-./c++.at:572:  $PREPARSER ./list
+./c++.at:566:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249927,54 +249870,29 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:567: $here/modern
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
 stdout:
-Modern C++: 201703
-./c++.at:567:  $PREPARSER ./list
+./c++.at:235:  $PREPARSER ./list
 stderr:
-Destroy: "0"
-Destroy: "0"
-Destroy: 1
-Destroy: "1"
-Destroy: (0)
-Destroy: "2"
-Destroy: "2"
-Destroy: (0, 1)
-Destroy: ""
-Destroy: 3
-Destroy: (0, 1, 2)
-Destroy: "4"
-Destroy: "4"
-Destroy: (0, 1, 2)
-Destroy: (0, 1, 2, 4)
-Destroy: 5
-Destroy: (0, 1, 2, 4)
-Destroy: "6"
-Destroy: "6"
-Destroy: (0, 1, 2, 4)
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:235: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:566: $here/modern
+./c++.at:571: $here/modern
 stdout:
-Modern C++: 202002
-./c++.at:566:  $PREPARSER ./list
+Modern C++: 201703
+./c++.at:571:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -249997,10 +249915,13 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:568: $here/modern
@@ -250030,14 +249951,18 @@
 Destroy: ()
 Destroy: (0, 1, 2, 4, 6)
 ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
 ======== Testing with C++ standard flags: ''
 ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:571: $here/modern
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
 stdout:
-Legac++
-./c++.at:571:  $PREPARSER ./list
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:572: $here/modern
+stdout:
+Modern C++: 201703
+./c++.at:572:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250060,23 +249985,19 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:570: $here/modern
+./c++.at:567: $here/modern
 stdout:
-Modern C++: 201402
-./c++.at:570:  $PREPARSER ./list
+Modern C++: 201703
+./c++.at:567:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250099,10 +250020,26 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:235:  $PREPARSER ./list
+stderr:
+./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+664. c++.at:107:  ok
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+
 stderr:
 stdout:
 ./c++.at:569: $here/modern
@@ -250134,19 +250071,16 @@
 ./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ...
+======== Testing with C++ standard flags: ''
+./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:572: $here/modern
+./c++.at:570: $here/modern
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stdout:
-Legac++
-./c++.at:572:  $PREPARSER ./list
+Modern C++: 201402
+./c++.at:570:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250169,19 +250103,17 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 ======== Testing with C++ standard flags: ''
-./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:573: $here/modern
+./c++.at:571: $here/modern
 stdout:
-Modern C++: 201703
-./c++.at:573:  $PREPARSER ./list
+Legac++
+./c++.at:571:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250204,25 +250136,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:571: $here/modern
+./c++.at:566: $here/modern
 stdout:
-Legac++
-./c++.at:571:  $PREPARSER ./list
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+Modern C++: 202002
+./c++.at:566:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250245,16 +250168,22 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:567: $here/modern
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
 stdout:
-Modern C++: 202002
-./c++.at:567:  $PREPARSER ./list
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:572: $here/modern
+stdout:
+Legac++
+./c++.at:572:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250277,13 +250206,13 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:568: $here/modern
@@ -250317,10 +250246,20 @@
 ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:566: $here/modern
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
 stdout:
-Modern C++: 202100
-./c++.at:566:  $PREPARSER ./list
+./c++.at:566: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:567: $here/modern
+stdout:
+Modern C++: 202002
+./c++.at:567:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250343,19 +250282,14 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-666. c++.at:566:  ok
-
-./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ...
+./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 ./c++.at:570: $here/modern
 stdout:
 Modern C++: 201703
@@ -250383,15 +250317,19 @@
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
 ./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+stderr:
+stdout:
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 ./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:569: $here/modern
+./c++.at:571: $here/modern
 stdout:
-Modern C++: 202002
-./c++.at:569:  $PREPARSER ./list
+Legac++
+./c++.at:571:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250414,19 +250352,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:572: $here/modern
+./c++.at:573: $here/modern
 stdout:
-Legac++
-./c++.at:572:  $PREPARSER ./list
+Modern C++: 201703
+./c++.at:573:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250449,22 +250384,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-stderr:
-stdout:
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:573: $here/modern
+./c++.at:566: $here/modern
 stdout:
-Legac++
-./c++.at:573:  $PREPARSER ./list
+Modern C++: 202100
+./c++.at:566:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250487,16 +250416,19 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+666. c++.at:566:  ok
+
+674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ...
 ======== Testing with C++ standard flags: ''
-./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:571: $here/modern
+./c++.at:569: $here/modern
 stdout:
-Modern C++: 201103
-./c++.at:571:  $PREPARSER ./list
+Modern C++: 202002
+./c++.at:569:  $PREPARSER ./list
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250519,57 +250451,22 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:568: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:574: $here/modern
-stdout:
-Modern C++: 201703
-./c++.at:574:  $PREPARSER ./list
-stderr:
-Destroy: ""
-Destroy: ""
-Destroy: 1
-Destroy: ""
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: ""
-Destroy: 3
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: ()
-Destroy: 5
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:567: $here/modern
+./c++.at:572: $here/modern
 stdout:
-Modern C++: 202100
-./c++.at:567:  $PREPARSER ./list
+Legac++
+./c++.at:572:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250592,16 +250489,10 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-667. c++.at:567:  ok
-
-675. c++.at:584: testing Variants and Typed Midrule Actions ...
+./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-stderr:
-stdout:
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:568: $here/modern
@@ -250633,42 +250524,25 @@
 ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 668. c++.at:568:  ok
 
-676. c++.at:794: testing Doxygen Public Documentation ...
-./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-./c++.at:794: doxygen --version || exit 77
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr	2023-05-18 03:43:43.144638864 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found
-stdout:
-676. c++.at:794:  skipped (c++.at:794)
-
-677. c++.at:795: testing Doxygen Private Documentation ...
-./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-./c++.at:795: doxygen --version || exit 77
---- /dev/null	2023-05-17 22:25:16.000000000 -1200
-+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr	2023-05-18 03:43:44.424650350 -1200
-@@ -0,0 +1 @@
-+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found
-stdout:
-677. c++.at:795:  skipped (c++.at:795)
-
-678. c++.at:848: testing Relative namespace references ...
-./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+675. c++.at:584: testing Variants and Typed Midrule Actions ...
 ======== Testing with C++ standard flags: ''
-./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:570: $here/modern
+./c++.at:567: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
 stdout:
-Modern C++: 202002
-./c++.at:570:  $PREPARSER ./list
+./c++.at:571: $here/modern
+stdout:
+Modern C++: 201103
+./c++.at:571:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250691,16 +250565,19 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:572: $here/modern
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
 stdout:
-Modern C++: 201103
-./c++.at:572:  $PREPARSER ./list
+./c++.at:567: $here/modern
+stdout:
+Modern C++: 202100
+./c++.at:567:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250723,19 +250600,21 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stdout:
 ./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+667. c++.at:567:  ok
+
 stderr:
 stdout:
-./c++.at:569: $here/modern
+./c++.at:569: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
 stdout:
-Modern C++: 202100
-./c++.at:569:  $PREPARSER ./list
+./c++.at:570: $here/modern
+stdout:
+Modern C++: 202002
+./c++.at:570:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250758,19 +250637,33 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-669. c++.at:569:  ok
+./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+676. c++.at:794: testing Doxygen Public Documentation ...
+./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+./c++.at:794: doxygen --version || exit 77
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr	2024-06-20 12:41:59.651278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found
+stdout:
+676. c++.at:794:  skipped (c++.at:794)
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 
-stderr:
+677. c++.at:795: testing Doxygen Private Documentation ...
+./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+./c++.at:795: doxygen --version || exit 77
+--- /dev/null	2024-06-18 06:47:31.000000000 +1400
++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr	2024-06-20 12:42:00.255278020 +1400
+@@ -0,0 +1 @@
++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found
 stdout:
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-679. c++.at:854: testing Absolute namespace references ...
-./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-======== Testing with C++ standard flags: ''
-./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+677. c++.at:795:  skipped (c++.at:795)
 stderr:
 stdout:
 ./c++.at:573: $here/modern
+
 stdout:
 Legac++
 ./c++.at:573:  $PREPARSER ./list
@@ -250799,46 +250692,45 @@
 ./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+678. c++.at:848: testing Relative namespace references ...
+./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
 stderr:
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stdout:
-./c++.at:849:  $PREPARSER ./input
-stderr:
-./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 ======== Testing with C++ standard flags: ''
 ./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:571: $here/modern
+./c++.at:574: $here/modern
 stdout:
-Modern C++: 201402
-./c++.at:571:  $PREPARSER ./list
+Modern C++: 201703
+./c++.at:574:  $PREPARSER ./list
 stderr:
-Destroy: "0"
-Destroy: "0"
+Destroy: ""
+Destroy: ""
 Destroy: 1
-Destroy: "1"
-Destroy: (0)
-Destroy: "2"
-Destroy: "2"
-Destroy: (0, 1)
+Destroy: ""
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
 Destroy: ""
 Destroy: 3
-Destroy: (0, 1, 2)
-Destroy: "4"
-Destroy: "4"
-Destroy: (0, 1, 2)
-Destroy: (0, 1, 2, 4)
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: ()
 Destroy: 5
-Destroy: (0, 1, 2, 4)
-Destroy: "6"
-Destroy: "6"
-Destroy: (0, 1, 2, 4)
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
 ./c++.at:659:  $PREPARSER ./input
@@ -250890,23 +250782,49 @@
 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
+./c++.at:569: $here/modern
 stdout:
-./c++.at:849:  $PREPARSER ./input
+Modern C++: 202100
+./c++.at:569:  $PREPARSER ./list
 stderr:
-stdout:
+Destroy: "0"
+Destroy: "0"
+Destroy: 1
+Destroy: "1"
+Destroy: (0)
+Destroy: "2"
+Destroy: "2"
+Destroy: (0, 1)
+Destroy: ""
+Destroy: 3
+Destroy: (0, 1, 2)
+Destroy: "4"
+Destroy: "4"
+Destroy: (0, 1, 2)
+Destroy: (0, 1, 2, 4)
+Destroy: 5
+Destroy: (0, 1, 2, 4)
+Destroy: "6"
+Destroy: "6"
+Destroy: (0, 1, 2, 4)
+Destroy: (0, 1, 2, 4, 6)
+./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+669. c++.at:569:  ok
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+
+679. c++.at:854: testing Absolute namespace references ...
+./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+======== Testing with C++ standard flags: ''
+./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
-./c++.at:574: $here/modern
-./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
-======== Testing with C++ standard flags: ''
-Legac++
-./c++.at:574:  $PREPARSER ./list
-./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:572: $here/modern
+stdout:
+Modern C++: 201103
+./c++.at:572:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -250929,22 +250847,19 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:855:  $PREPARSER ./input
+./c++.at:849:  $PREPARSER ./input
 stderr:
-./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
+./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-stdout:
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 ./c++.at:855:  $PREPARSER ./input
 stderr:
 ./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -250952,6 +250867,9 @@
 ./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
 ./c++.at:849:  $PREPARSER ./input
 stderr:
 ./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -251011,13 +250929,17 @@
 ./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:571: $here/modern
 stderr:
 stdout:
-./c++.at:572: $here/modern
 stdout:
+./c++.at:855:  $PREPARSER ./input
 Modern C++: 201402
-./c++.at:572:  $PREPARSER ./list
+./c++.at:571:  $PREPARSER ./list
+stderr:
+./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -251040,28 +250962,21 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+======== Testing with C++ standard flags: ''
+./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stdout:
 ./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:855:  $PREPARSER ./input
-stderr:
-./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:570: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:573: $here/modern
 stdout:
-Modern C++: 201103
+Legac++
 ./c++.at:573:  $PREPARSER ./list
 stderr:
 Destroy: "0"
@@ -251098,6 +251013,9 @@
 ./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
 ./c++.at:570: $here/modern
 stdout:
 Modern C++: 202100
@@ -251127,24 +251045,21 @@
 ./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 670. c++.at:570:  ok
 
-680. c++.at:863: testing Syntactically invalid namespace references ...
-./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-680. c++.at:863:  ok
-
-681. c++.at:884: testing Syntax error discarding no lookahead ...
+stderr:
+stdout:
+./c++.at:855:  $PREPARSER ./input
+stderr:
+./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
+./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stdout:
-./c++.at:571: $here/modern
+./c++.at:574: $here/modern
 stdout:
-Modern C++: 201703
-./c++.at:571:  $PREPARSER ./list
+680. c++.at:863: testing Syntactically invalid namespace references ...
+Legac++
+./c++.at:574:  $PREPARSER ./list
+./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -251167,24 +251082,32 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
 stderr:
 stdout:
-./c++.at:855:  $PREPARSER ./input
-stderr:
-./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+680. c++.at:863:  ok
+
+681. c++.at:884: testing Syntax error discarding no lookahead ...
 ======== Testing with C++ standard flags: ''
-./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:659:  $PREPARSER ./input
 stderr:
+stdout:
+./c++.at:572: $here/modern
+stderr:
+stderr:
+stdout:
 Starting parse
 Entering state 0
 Stack now 0
@@ -251230,16 +251153,16 @@
 Cleanup: popping nterm expr (40)
 destroy: 40
 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./c++.at:849:  $PREPARSER ./input
+Modern C++: 201402
+./c++.at:572:  $PREPARSER ./list
 ======== Testing with C++ standard flags: ''
 ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
-stdout:
-./c++.at:574: $here/modern
-stdout:
-Legac++
-./c++.at:574:  $PREPARSER ./list
 stderr:
+./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
 Destroy: "0"
 Destroy: "0"
 Destroy: 1
@@ -251261,17 +251184,22 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:849:  $PREPARSER ./input
+./c++.at:855:  $PREPARSER ./input
 stderr:
-./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:941:  $PREPARSER ./input
@@ -251285,16 +251213,6 @@
 ./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:855:  $PREPARSER ./input
-stderr:
-./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./c++.at:849:  $PREPARSER ./input
 stderr:
 ./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -251302,13 +251220,10 @@
 ./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:572: $here/modern
+./c++.at:571: $here/modern
 stdout:
 Modern C++: 201703
-./c++.at:572:  $PREPARSER ./list
+./c++.at:571:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -251331,14 +251246,27 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+stderr:
+stdout:
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:855:  $PREPARSER ./input
+stderr:
+./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:659:  $PREPARSER ./input
 stderr:
+stdout:
+stderr:
+./c++.at:941:  $PREPARSER ./input
 Starting parse
 Entering state 0
 Stack now 0
@@ -251384,25 +251312,22 @@
 Cleanup: popping nterm expr (40)
 destroy: 40
 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 stderr:
-stdout:
-./c++.at:941:  $PREPARSER ./input
-stderr:
-./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 syntax error
 Discarding 'a'.
 Reducing 'a'.
 ./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
+./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+======== Testing with C++ standard flags: ''
 ./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 ./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:573: $here/modern
 stdout:
-Modern C++: 201402
+Modern C++: 201103
 ./c++.at:573:  $PREPARSER ./list
 stderr:
 Destroy: "0"
@@ -251426,37 +251351,16 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-stderr:
 ./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./c++.at:855:  $PREPARSER ./input
-stderr:
 ======== Testing with C++ standard flags: ''
 ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:941:  $PREPARSER ./input
-stderr:
-syntax error
-Discarding 'a'.
-Reducing 'a'.
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:849:  $PREPARSER ./input
-./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 ./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
@@ -251465,41 +251369,9 @@
 stdout:
 ./c++.at:574: $here/modern
 stdout:
-Modern C++: 201103
+Legac++
 ./c++.at:574:  $PREPARSER ./list
 stderr:
-Destroy: ""
-Destroy: ""
-Destroy: 1
-Destroy: ""
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: ""
-Destroy: 3
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: ()
-Destroy: 5
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:571: $here/modern
-stdout:
-Modern C++: 202002
-./c++.at:571:  $PREPARSER ./list
-stderr:
 Destroy: "0"
 Destroy: "0"
 Destroy: 1
@@ -251521,62 +251393,13 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:659:  $PREPARSER ./input
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-Next token is token NUMBER (1)
-Shifting token NUMBER (1)
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 1 (line 34):
-   $1 = token NUMBER (1)
--> $$ = nterm expr (10)
-destroy: 1
-Entering state 2
-Stack now 0 2
-Reading a token
-Next token is token NUMBER (30)
-Reducing stack by rule 2 (line 35):
--> $$ = nterm @1 (20)
-Entering state 4
-Stack now 0 2 4
-Next token is token NUMBER (30)
-Shifting token NUMBER (30)
-Entering state 5
-Stack now 0 2 4 5
-Reducing stack by rule 3 (line 35):
-   $1 = nterm expr (10)
-   $2 = nterm @1 (20)
-   $3 = token NUMBER (30)
-expr: 10 20 30
--> $$ = nterm expr (40)
-destroy: 30
-destroy: 20
-destroy: 10
-Entering state 2
-Stack now 0 2
-Reading a token
-Next token is token EOI ()
-Shifting token EOI ()
-Entering state 3
-Stack now 0 2 3
-Stack now 0 2 3
-Cleanup: popping token EOI ()
-Cleanup: popping nterm expr (40)
-destroy: 40
-./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:855:  $PREPARSER ./input
@@ -251597,36 +251420,11 @@
 ./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:849:  $PREPARSER ./input
-stderr:
-./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-======== Testing with C++ standard flags: ''
-./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:855:  $PREPARSER ./input
-stderr:
-./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-======== Testing with C++ standard flags: ''
-./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:573: $here/modern
+./c++.at:572: $here/modern
 stdout:
-stderr:
 Modern C++: 201703
-./c++.at:573:  $PREPARSER ./list
-stdout:
+./c++.at:572:  $PREPARSER ./list
 stderr:
-./c++.at:659:  $PREPARSER ./input
 Destroy: "0"
 Destroy: "0"
 Destroy: 1
@@ -251648,8 +251446,14 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
+./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:659:  $PREPARSER ./input
 stderr:
-./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
@@ -251695,32 +251499,49 @@
 Cleanup: popping nterm expr (40)
 destroy: 40
 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
 ======== Testing with C++ standard flags: ''
 ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:849:  $PREPARSER ./input
+stderr:
+./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:849: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:941:  $PREPARSER ./input
 stderr:
-./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 syntax error
 Discarding 'a'.
 Reducing 'a'.
 ./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+./c++.at:855:  $PREPARSER ./input
+stderr:
+./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+======== Testing with C++ standard flags: ''
+./c++.at:855: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
 ./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:572: $here/modern
+./c++.at:571: $here/modern
 stdout:
 Modern C++: 202002
-./c++.at:572:  $PREPARSER ./list
+./c++.at:571:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -251743,22 +251564,55 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
-./c++.at:850:  $PREPARSER ./input
+./c++.at:573: $here/modern
+stdout:
+Modern C++: 201402
+./c++.at:573:  $PREPARSER ./list
 stderr:
-./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Destroy: "0"
+Destroy: "0"
+Destroy: 1
+Destroy: "1"
+Destroy: (0)
+Destroy: "2"
+Destroy: "2"
+Destroy: (0, 1)
+Destroy: ""
+Destroy: 3
+Destroy: (0, 1, 2)
+Destroy: "4"
+Destroy: "4"
+Destroy: (0, 1, 2)
+Destroy: (0, 1, 2, 4)
+Destroy: 5
+Destroy: (0, 1, 2, 4)
+Destroy: "6"
+Destroy: "6"
+Destroy: (0, 1, 2, 4)
+Destroy: (0, 1, 2, 4, 6)
+./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:849:  $PREPARSER ./input
+stderr:
+./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
 ======== Testing with C++ standard flags: ''
 ./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:574: $here/modern
 stdout:
-Modern C++: 201402
+Modern C++: 201103
 ./c++.at:574:  $PREPARSER ./list
 stderr:
 Destroy: ""
@@ -251785,17 +251639,81 @@
 ./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+stderr:
+stdout:
+./c++.at:941:  $PREPARSER ./input
+stderr:
+syntax error
+Discarding 'a'.
+Reducing 'a'.
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:856:  $PREPARSER ./input
+./c++.at:855:  $PREPARSER ./input
 stderr:
-./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+./c++.at:659:  $PREPARSER ./input
+./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+Next token is token NUMBER (1)
+Shifting token NUMBER (1)
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 1 (line 34):
+   $1 = token NUMBER (1)
+-> $$ = nterm expr (10)
+destroy: 1
+Entering state 2
+Stack now 0 2
+Reading a token
+Next token is token NUMBER (30)
+Reducing stack by rule 2 (line 35):
+-> $$ = nterm @1 (20)
+Entering state 4
+Stack now 0 2 4
+Next token is token NUMBER (30)
+Shifting token NUMBER (30)
+Entering state 5
+Stack now 0 2 4 5
+Reducing stack by rule 3 (line 35):
+   $1 = nterm expr (10)
+   $2 = nterm @1 (20)
+   $3 = token NUMBER (30)
+expr: 10 20 30
+-> $$ = nterm expr (40)
+destroy: 30
+destroy: 20
+destroy: 10
+Entering state 2
+Stack now 0 2
+Reading a token
+Next token is token EOI ()
+Shifting token EOI ()
+Entering state 3
+Stack now 0 2 3
+Stack now 0 2 3
+Cleanup: popping token EOI ()
+Cleanup: popping nterm expr (40)
+destroy: 40
+./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 ======== Testing with C++ standard flags: ''
 ./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:850:  $PREPARSER ./input
@@ -251823,10 +251741,10 @@
 ./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:571: $here/modern
+./c++.at:572: $here/modern
 stdout:
-Modern C++: 202100
-./c++.at:571:  $PREPARSER ./list
+Modern C++: 202002
+./c++.at:572:  $PREPARSER ./list
 stderr:
 Destroy: "0"
 Destroy: "0"
@@ -251849,13 +251767,10 @@
 Destroy: "6"
 Destroy: (0, 1, 2, 4)
 Destroy: (0, 1, 2, 4, 6)
-./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-671. c++.at:571:  ok
-
-682. c++.at:1064: testing Syntax error as exception: lalr1.cc ...
-./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:850:  $PREPARSER ./input
@@ -251865,6 +251780,19 @@
 ./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:856:  $PREPARSER ./input
+stderr:
+./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:571: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
 ./c++.at:659:  $PREPARSER ./input
 stderr:
 Starting parse
@@ -251917,50 +251845,12 @@
 ./c++.at:659: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:856:  $PREPARSER ./input
-stderr:
-./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:941:  $PREPARSER ./input
-stderr:
-syntax error
-Discarding 'a'.
-Reducing 'a'.
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:850:  $PREPARSER ./input
-stderr:
-./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
 ./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
-./c++.at:856:  $PREPARSER ./input
-stderr:
-./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./c++.at:573: $here/modern
 stdout:
-Modern C++: 202002
+Modern C++: 201703
 ./c++.at:573:  $PREPARSER ./list
 stderr:
 Destroy: "0"
@@ -251987,68 +251877,35 @@
 ./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
-./c++.at:1064:  $PREPARSER ./input < in
+./c++.at:850:  $PREPARSER ./input
 stderr:
+./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./c++.at:572: $here/modern
-./c++.at:1064:  $PREPARSER ./input < in
 stdout:
+./c++.at:941:  $PREPARSER ./input
 stderr:
-error: invalid expression
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Modern C++: 202100
-./c++.at:572:  $PREPARSER ./list
-./c++.at:1064:  $PREPARSER ./input < in
-stderr:
-stderr:
-error: invalid character
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-Destroy: "0"
-Destroy: "0"
-Destroy: 1
-Destroy: "1"
-Destroy: (0)
-Destroy: "2"
-Destroy: "2"
-Destroy: (0, 1)
-Destroy: ""
-Destroy: 3
-Destroy: (0, 1, 2)
-Destroy: "4"
-Destroy: "4"
-Destroy: (0, 1, 2)
-Destroy: (0, 1, 2, 4)
-Destroy: 5
-Destroy: (0, 1, 2, 4)
-Destroy: "6"
-Destroy: "6"
-Destroy: (0, 1, 2, 4)
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+syntax error
+Discarding 'a'.
+Reducing 'a'.
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
-672. c++.at:572:  ok
-
-683. c++.at:1065: testing Syntax error as exception: glr.cc ...
-./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./c++.at:941: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:574: $here/modern
 stdout:
-Modern C++: 201703
+Modern C++: 201402
+stderr:
 ./c++.at:574:  $PREPARSER ./list
+stdout:
 stderr:
-======== Testing with C++ standard flags: ''
-./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+./c++.at:571: $here/modern
 Destroy: ""
 Destroy: ""
 Destroy: 1
@@ -252070,10 +251927,76 @@
 Destroy: ""
 Destroy: ()
 Destroy: (0, 1, 2, 4, 6)
+stdout:
 ./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Modern C++: 202100
+./c++.at:571:  $PREPARSER ./list
+stderr:
 ======== Testing with C++ standard flags: ''
 ./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+Destroy: "0"
+Destroy: "0"
+Destroy: 1
+Destroy: "1"
+Destroy: (0)
+Destroy: "2"
+Destroy: "2"
+Destroy: (0, 1)
+Destroy: ""
+Destroy: 3
+Destroy: (0, 1, 2)
+Destroy: "4"
+Destroy: "4"
+Destroy: (0, 1, 2)
+Destroy: (0, 1, 2, 4)
+Destroy: 5
+Destroy: (0, 1, 2, 4)
+Destroy: "6"
+Destroy: "6"
+Destroy: (0, 1, 2, 4)
+Destroy: (0, 1, 2, 4, 6)
+./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+671. c++.at:571:  ok
+
 ./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+682. c++.at:1064: testing Syntax error as exception: lalr1.cc ...
+./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+stderr:
+stdout:
+./c++.at:856:  $PREPARSER ./input
+stderr:
+======== Testing with C++ standard flags: ''
+./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:850:  $PREPARSER ./input
+stderr:
+./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:856:  $PREPARSER ./input
+stderr:
+./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:572: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:941:  $PREPARSER ./input
+stderr:
+syntax error
+Discarding 'a'.
+Reducing 'a'.
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+681. c++.at:884:  ok
+
 stderr:
 stdout:
 ./c++.at:659:  $PREPARSER ./input
@@ -252125,42 +252048,92 @@
 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 675. c++.at:584:  ok
 
+683. c++.at:1065: testing Syntax error as exception: glr.cc ...
+./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+======== Testing with C++ standard flags: ''
+./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+684. c++.at:1066: testing Syntax error as exception: glr2.cc ...
+./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+======== Testing with C++ standard flags: ''
+./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-./c++.at:941:  $PREPARSER ./input
-stderr:
-syntax error
-Discarding 'a'.
-Reducing 'a'.
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-681. c++.at:884:  ok
+./c++.at:1066: ./check
+./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
-
-684. c++.at:1066: testing Syntax error as exception: glr2.cc ...
-./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
 ./c++.at:850:  $PREPARSER ./input
 stderr:
 ./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-======== Testing with C++ standard flags: ''
-./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./c++.at:572: $here/modern
+stdout:
+Modern C++: 202100
+./c++.at:572:  $PREPARSER ./list
+stderr:
+Destroy: "0"
+Destroy: "0"
+Destroy: 1
+Destroy: "1"
+Destroy: (0)
+Destroy: "2"
+Destroy: "2"
+Destroy: (0, 1)
+Destroy: ""
+Destroy: 3
+Destroy: (0, 1, 2)
+Destroy: "4"
+Destroy: "4"
+Destroy: (0, 1, 2)
+Destroy: (0, 1, 2, 4)
+Destroy: 5
+Destroy: (0, 1, 2, 4)
+Destroy: "6"
+Destroy: "6"
+Destroy: (0, 1, 2, 4)
+Destroy: (0, 1, 2, 4, 6)
+./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+672. c++.at:572:  ok
+
+stderr:
 685. c++.at:1360: testing Exception safety with error recovery  ...
 ./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS
-stderr:
 stdout:
-./c++.at:1066: ./check
-./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+./c++.at:1064:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1064:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1064:  $PREPARSER ./input < in
+stderr:
+error: invalid character
 stderr:
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 ./c++.at:856:  $PREPARSER ./input
+======== Testing with C++ standard flags: ''
 stderr:
+./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 ./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
 ./c++.at:1360: ./exceptions || exit 77
 stderr:
 Inner caught
@@ -252177,33 +252150,78 @@
 ./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:1064:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1064:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1064:  $PREPARSER ./input < in
+./c++.at:574: $here/modern
+stdout:
+Modern C++: 201703
+./c++.at:574:  $PREPARSER ./list
 stderr:
-error: invalid character
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Destroy: ""
+Destroy: ""
+Destroy: 1
+Destroy: ""
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: ""
+Destroy: 3
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: ()
+Destroy: 5
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: (0, 1, 2, 4, 6)
+./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 stdout:
 ./c++.at:856:  $PREPARSER ./input
 stderr:
+stderr:
+stdout:
 ./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:573: $here/modern
 ======== Testing with C++ standard flags: ''
+stdout:
 ./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+Modern C++: 202002
+./c++.at:573:  $PREPARSER ./list
+stderr:
+Destroy: "0"
+Destroy: "0"
+Destroy: 1
+Destroy: "1"
+Destroy: (0)
+Destroy: "2"
+Destroy: "2"
+Destroy: (0, 1)
+Destroy: ""
+Destroy: 3
+Destroy: (0, 1, 2)
+Destroy: "4"
+Destroy: "4"
+Destroy: (0, 1, 2)
+Destroy: (0, 1, 2, 4)
+Destroy: 5
+Destroy: (0, 1, 2, 4)
+Destroy: "6"
+Destroy: "6"
+Destroy: (0, 1, 2, 4)
+Destroy: (0, 1, 2, 4, 6)
+./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
 stderr:
 stdout:
 ./c++.at:1065:  $PREPARSER ./input < in
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
 stderr:
 error: invalid expression
 caught error
@@ -252222,26 +252240,6 @@
 ./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:850:  $PREPARSER ./input
-stderr:
-./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:856:  $PREPARSER ./input
-stderr:
-./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./c++.at:1064:  $PREPARSER ./input < in
 stderr:
 error: invalid expression
@@ -252261,6 +252259,20 @@
 ./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
+./c++.at:850:  $PREPARSER ./input
+stderr:
+./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:850: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:856:  $PREPARSER ./input
+stderr:
+./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:856: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
 ./c++.at:1360:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
@@ -252282,57 +252294,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaadd3d5b40->Object::Object { }
-Next token is token 'a' (0xaaaadd3d5b40 'a')
-Shifting token 'a' (0xaaaadd3d5b40 'a')
+0xaaaad22b7b40->Object::Object { }
+Next token is token 'a' (0xaaaad22b7b40 'a')
+Shifting token 'a' (0xaaaad22b7b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaadd3d5b40 'a')
--> $$ = nterm item (0xaaaadd3d5b40 'a')
+   $1 = token 'a' (0xaaaad22b7b40 'a')
+-> $$ = nterm item (0xaaaad22b7b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaadd3d5b90->Object::Object { 0xaaaadd3d5b40 }
-Next token is token 'a' (0xaaaadd3d5b90 'a')
-Shifting token 'a' (0xaaaadd3d5b90 'a')
+0xaaaad22b7b90->Object::Object { 0xaaaad22b7b40 }
+Next token is token 'a' (0xaaaad22b7b90 'a')
+Shifting token 'a' (0xaaaad22b7b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaadd3d5b90 'a')
--> $$ = nterm item (0xaaaadd3d5b90 'a')
+   $1 = token 'a' (0xaaaad22b7b90 'a')
+-> $$ = nterm item (0xaaaad22b7b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaadd3d5be0->Object::Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90 }
-Next token is token 'a' (0xaaaadd3d5be0 'a')
-Shifting token 'a' (0xaaaadd3d5be0 'a')
+0xaaaad22b7be0->Object::Object { 0xaaaad22b7b40, 0xaaaad22b7b90 }
+Next token is token 'a' (0xaaaad22b7be0 'a')
+Shifting token 'a' (0xaaaad22b7be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaadd3d5be0 'a')
--> $$ = nterm item (0xaaaadd3d5be0 'a')
+   $1 = token 'a' (0xaaaad22b7be0 'a')
+-> $$ = nterm item (0xaaaad22b7be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaadd3d5c30->Object::Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0 }
-Next token is token 'a' (0xaaaadd3d5c30 'a')
-Shifting token 'a' (0xaaaadd3d5c30 'a')
+0xaaaad22b7c30->Object::Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0 }
+Next token is token 'a' (0xaaaad22b7c30 'a')
+Shifting token 'a' (0xaaaad22b7c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaadd3d5c30 'a')
--> $$ = nterm item (0xaaaadd3d5c30 'a')
+   $1 = token 'a' (0xaaaad22b7c30 'a')
+-> $$ = nterm item (0xaaaad22b7c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaadd3d5c80->Object::Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0, 0xaaaadd3d5c30 }
-Next token is token 'p' (0xaaaadd3d5c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaadd3d5c80->Object::~Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0, 0xaaaadd3d5c30, 0xaaaadd3d5c80 }
-0xaaaadd3d5c30->Object::~Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0, 0xaaaadd3d5c30 }
-0xaaaadd3d5be0->Object::~Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0 }
-0xaaaadd3d5b90->Object::~Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90 }
-0xaaaadd3d5b40->Object::~Object { 0xaaaadd3d5b40 }
+0xaaaad22b7c80->Object::Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0, 0xaaaad22b7c30 }
+Next token is token 'p' (0xaaaad22b7c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaad22b7c80->Object::~Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0, 0xaaaad22b7c30, 0xaaaad22b7c80 }
+0xaaaad22b7c30->Object::~Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0, 0xaaaad22b7c30 }
+0xaaaad22b7be0->Object::~Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0 }
+0xaaaad22b7b90->Object::~Object { 0xaaaad22b7b40, 0xaaaad22b7b90 }
+0xaaaad22b7b40->Object::~Object { 0xaaaad22b7b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -252341,57 +252353,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaadd3d5b40->Object::Object { }
-Next token is token 'a' (0xaaaadd3d5b40 'a')
-Shifting token 'a' (0xaaaadd3d5b40 'a')
+0xaaaad22b7b40->Object::Object { }
+Next token is token 'a' (0xaaaad22b7b40 'a')
+Shifting token 'a' (0xaaaad22b7b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaadd3d5b40 'a')
--> $$ = nterm item (0xaaaadd3d5b40 'a')
+   $1 = token 'a' (0xaaaad22b7b40 'a')
+-> $$ = nterm item (0xaaaad22b7b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaadd3d5b90->Object::Object { 0xaaaadd3d5b40 }
-Next token is token 'a' (0xaaaadd3d5b90 'a')
-Shifting token 'a' (0xaaaadd3d5b90 'a')
+0xaaaad22b7b90->Object::Object { 0xaaaad22b7b40 }
+Next token is token 'a' (0xaaaad22b7b90 'a')
+Shifting token 'a' (0xaaaad22b7b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaadd3d5b90 'a')
--> $$ = nterm item (0xaaaadd3d5b90 'a')
+   $1 = token 'a' (0xaaaad22b7b90 'a')
+-> $$ = nterm item (0xaaaad22b7b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaadd3d5be0->Object::Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90 }
-Next token is token 'a' (0xaaaadd3d5be0 'a')
-Shifting token 'a' (0xaaaadd3d5be0 'a')
+0xaaaad22b7be0->Object::Object { 0xaaaad22b7b40, 0xaaaad22b7b90 }
+Next token is token 'a' (0xaaaad22b7be0 'a')
+Shifting token 'a' (0xaaaad22b7be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaadd3d5be0 'a')
--> $$ = nterm item (0xaaaadd3d5be0 'a')
+   $1 = token 'a' (0xaaaad22b7be0 'a')
+-> $$ = nterm item (0xaaaad22b7be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaadd3d5c30->Object::Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0 }
-Next token is token 'a' (0xaaaadd3d5c30 'a')
-Shifting token 'a' (0xaaaadd3d5c30 'a')
+0xaaaad22b7c30->Object::Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0 }
+Next token is token 'a' (0xaaaad22b7c30 'a')
+Shifting token 'a' (0xaaaad22b7c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaadd3d5c30 'a')
--> $$ = nterm item (0xaaaadd3d5c30 'a')
+   $1 = token 'a' (0xaaaad22b7c30 'a')
+-> $$ = nterm item (0xaaaad22b7c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaadd3d5c80->Object::Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0, 0xaaaadd3d5c30 }
-Next token is token 'p' (0xaaaadd3d5c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaadd3d5c80->Object::~Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0, 0xaaaadd3d5c30, 0xaaaadd3d5c80 }
-0xaaaadd3d5c30->Object::~Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0, 0xaaaadd3d5c30 }
-0xaaaadd3d5be0->Object::~Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90, 0xaaaadd3d5be0 }
-0xaaaadd3d5b90->Object::~Object { 0xaaaadd3d5b40, 0xaaaadd3d5b90 }
-0xaaaadd3d5b40->Object::~Object { 0xaaaadd3d5b40 }
+0xaaaad22b7c80->Object::Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0, 0xaaaad22b7c30 }
+Next token is token 'p' (0xaaaad22b7c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaad22b7c80->Object::~Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0, 0xaaaad22b7c30, 0xaaaad22b7c80 }
+0xaaaad22b7c30->Object::~Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0, 0xaaaad22b7c30 }
+0xaaaad22b7be0->Object::~Object { 0xaaaad22b7b40, 0xaaaad22b7b90, 0xaaaad22b7be0 }
+0xaaaad22b7b90->Object::~Object { 0xaaaad22b7b40, 0xaaaad22b7b90 }
+0xaaaad22b7b40->Object::~Object { 0xaaaad22b7b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: grep '^exception caught: printer$' stderr
@@ -252415,69 +252427,6 @@
 ./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:574: $here/modern
-stdout:
-Modern C++: 202002
-./c++.at:574:  $PREPARSER ./list
-stderr:
-Destroy: ""
-Destroy: ""
-Destroy: 1
-Destroy: ""
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: ""
-Destroy: 3
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: ()
-Destroy: 5
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
-stderr:
-stdout:
-./c++.at:573: $here/modern
-stdout:
-Modern C++: 202100
-./c++.at:573:  $PREPARSER ./list
-stderr:
-Destroy: "0"
-Destroy: "0"
-Destroy: 1
-Destroy: "1"
-Destroy: (0)
-Destroy: "2"
-Destroy: "2"
-Destroy: (0, 1)
-Destroy: ""
-Destroy: 3
-Destroy: (0, 1, 2)
-Destroy: "4"
-Destroy: "4"
-Destroy: (0, 1, 2)
-Destroy: (0, 1, 2, 4)
-Destroy: 5
-Destroy: (0, 1, 2, 4)
-Destroy: "6"
-Destroy: "6"
-Destroy: (0, 1, 2, 4)
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-673. c++.at:573:  ok
-
-stderr:
-stdout:
 ./c++.at:1065:  $PREPARSER ./input < in
 stderr:
 error: invalid expression
@@ -252495,8 +252444,25 @@
 ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
-686. c++.at:1361: testing Exception safety without error recovery  ...
-./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS
+stderr:
+stdout:
+./c++.at:1064:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1064:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1064:  $PREPARSER ./input < in
+stderr:
+error: invalid character
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1066:  $PREPARSER ./input < in
@@ -252534,13 +252500,7 @@
 ./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
-./c++.at:1361: ./exceptions || exit 77
-stderr:
-Inner caught
-Outer caught
-./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc --report=all input.yy
-======== Testing with C++ standard flags: ''
-./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
 stderr:
 stdout:
 ./c++.at:850:  $PREPARSER ./input
@@ -252559,6 +252519,60 @@
 ./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:574: $here/modern
+stdout:
+Modern C++: 202002
+./c++.at:574:  $PREPARSER ./list
+stderr:
+Destroy: ""
+Destroy: ""
+Destroy: 1
+Destroy: ""
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: ""
+Destroy: 3
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: ()
+Destroy: 5
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: (0, 1, 2, 4, 6)
+./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o list.cc list.y
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o list list.cc $LIBS
+stderr:
+stdout:
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid character
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+stderr:
+stdout:
+./c++.at:573: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
 ./c++.at:1360:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
@@ -252573,163 +252587,233 @@
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1360:  $PREPARSER ./input aaaap
 stderr:
+stderr:
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./c++.at:1064:  $PREPARSER ./input < in
 ./c++.at:1360:  $PREPARSER ./input --debug aaaap
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaaeb4e0b40->Object::Object { }
-Next token is token 'a' (0xaaaaeb4e0b40 'a')
-Shifting token 'a' (0xaaaaeb4e0b40 'a')
+0xaaaac28b5b40->Object::Object { }
+Next token is token 'a' (0xaaaac28b5b40 'a')
+Shifting token 'a' (0xaaaac28b5b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaeb4e0b40 'a')
--> $$ = nterm item (0xaaaaeb4e0b40 'a')
+   $1 = token 'a' (0xaaaac28b5b40 'a')
+-> $$ = nterm item (0xaaaac28b5b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaaeb4e0b90->Object::Object { 0xaaaaeb4e0b40 }
-Next token is token 'a' (0xaaaaeb4e0b90 'a')
-Shifting token 'a' (0xaaaaeb4e0b90 'a')
+0xaaaac28b5b90->Object::Object { 0xaaaac28b5b40 }
+Next token is token 'a' (0xaaaac28b5b90 'a')
+Shifting token 'a' (0xaaaac28b5b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaeb4e0b90 'a')
--> $$ = nterm item (0xaaaaeb4e0b90 'a')
+   $1 = token 'a' (0xaaaac28b5b90 'a')
+-> $$ = nterm item (0xaaaac28b5b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaaeb4e0be0->Object::Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90 }
-Next token is token 'a' (0xaaaaeb4e0be0 'a')
-Shifting token 'a' (0xaaaaeb4e0be0 'a')
+0xaaaac28b5be0->Object::Object { 0xaaaac28b5b40, 0xaaaac28b5b90 }
+Next token is token 'a' (0xaaaac28b5be0 'a')
+Shifting token 'a' (0xaaaac28b5be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaeb4e0be0 'a')
--> $$ = nterm item (0xaaaaeb4e0be0 'a')
+   $1 = token 'a' (0xaaaac28b5be0 'a')
+-> $$ = nterm item (0xaaaac28b5be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaaeb4e0c30->Object::Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0 }
-Next token is token 'a' (0xaaaaeb4e0c30 'a')
-Shifting token 'a' (0xaaaaeb4e0c30 'a')
+0xaaaac28b5c30->Object::Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0 }
+Next token is token 'a' (0xaaaac28b5c30 'a')
+Shifting token 'a' (0xaaaac28b5c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaeb4e0c30 'a')
--> $$ = nterm item (0xaaaaeb4e0c30 'a')
+   $1 = token 'a' (0xaaaac28b5c30 'a')
+-> $$ = nterm item (0xaaaac28b5c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaaeb4e0c80->Object::Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0, 0xaaaaeb4e0c30 }
-Next token is token 'p' (0xaaaaeb4e0c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaaeb4e0c80->Object::~Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0, 0xaaaaeb4e0c30, 0xaaaaeb4e0c80 }
-0xaaaaeb4e0c30->Object::~Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0, 0xaaaaeb4e0c30 }
-0xaaaaeb4e0be0->Object::~Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0 }
-0xaaaaeb4e0b90->Object::~Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90 }
-0xaaaaeb4e0b40->Object::~Object { 0xaaaaeb4e0b40 }
+0xaaaac28b5c80->Object::Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0, 0xaaaac28b5c30 }
+Next token is token 'p' (0xaaaac28b5c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaac28b5c80->Object::~Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0, 0xaaaac28b5c30, 0xaaaac28b5c80 }
+0xaaaac28b5c30->Object::~Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0, 0xaaaac28b5c30 }
+0xaaaac28b5be0->Object::~Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0 }
+0xaaaac28b5b90->Object::~Object { 0xaaaac28b5b40, 0xaaaac28b5b90 }
+0xaaaac28b5b40->Object::~Object { 0xaaaac28b5b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaaeb4e0b40->Object::Object { }
-Next token is token 'a' (0xaaaaeb4e0b40 'a')
-Shifting token 'a' (0xaaaaeb4e0b40 'a')
+0xaaaac28b5b40->Object::Object { }
+Next token is token 'a' (0xaaaac28b5b40 'a')
+Shifting token 'a' (0xaaaac28b5b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaeb4e0b40 'a')
--> $$ = nterm item (0xaaaaeb4e0b40 'a')
+   $1 = token 'a' (0xaaaac28b5b40 'a')
+-> $$ = nterm item (0xaaaac28b5b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaaeb4e0b90->Object::Object { 0xaaaaeb4e0b40 }
-Next token is token 'a' (0xaaaaeb4e0b90 'a')
-Shifting token 'a' (0xaaaaeb4e0b90 'a')
+0xaaaac28b5b90->Object::Object { 0xaaaac28b5b40 }
+Next token is token 'a' (0xaaaac28b5b90 'a')
+Shifting token 'a' (0xaaaac28b5b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaeb4e0b90 'a')
--> $$ = nterm item (0xaaaaeb4e0b90 'a')
+   $1 = token 'a' (0xaaaac28b5b90 'a')
+-> $$ = nterm item (0xaaaac28b5b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaaeb4e0be0->Object::Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90 }
-Next token is token 'a' (0xaaaaeb4e0be0 'a')
-Shifting token 'a' (0xaaaaeb4e0be0 'a')
+0xaaaac28b5be0->Object::Object { 0xaaaac28b5b40, 0xaaaac28b5b90 }
+Next token is token 'a' (0xaaaac28b5be0 'a')
+Shifting token 'a' (0xaaaac28b5be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaeb4e0be0 'a')
--> $$ = nterm item (0xaaaaeb4e0be0 'a')
+   $1 = token 'a' (0xaaaac28b5be0 'a')
+-> $$ = nterm item (0xaaaac28b5be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaaeb4e0c30->Object::Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0 }
-Next token is token 'a' (0xaaaaeb4e0c30 'a')
-Shifting token 'a' (0xaaaaeb4e0c30 'a')
+0xaaaac28b5c30->Object::Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0 }
+Next token is token 'a' (0xaaaac28b5c30 'a')
+Shifting token 'a' (0xaaaac28b5c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaeb4e0c30 'a')
--> $$ = nterm item (0xaaaaeb4e0c30 'a')
+   $1 = token 'a' (0xaaaac28b5c30 'a')
+-> $$ = nterm item (0xaaaac28b5c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaaeb4e0c80->Object::Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0, 0xaaaaeb4e0c30 }
-Next token is token 'p' (0xaaaaeb4e0c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaaeb4e0c80->Object::~Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0, 0xaaaaeb4e0c30, 0xaaaaeb4e0c80 }
-0xaaaaeb4e0c30->Object::~Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0, 0xaaaaeb4e0c30 }
-0xaaaaeb4e0be0->Object::~Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90, 0xaaaaeb4e0be0 }
-0xaaaaeb4e0b90->Object::~Object { 0xaaaaeb4e0b40, 0xaaaaeb4e0b90 }
-0xaaaaeb4e0b40->Object::~Object { 0xaaaaeb4e0b40 }
+0xaaaac28b5c80->Object::Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0, 0xaaaac28b5c30 }
+Next token is token 'p' (0xaaaac28b5c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaac28b5c80->Object::~Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0, 0xaaaac28b5c30, 0xaaaac28b5c80 }
+0xaaaac28b5c30->Object::~Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0, 0xaaaac28b5c30 }
+0xaaaac28b5be0->Object::~Object { 0xaaaac28b5b40, 0xaaaac28b5b90, 0xaaaac28b5be0 }
+0xaaaac28b5b90->Object::~Object { 0xaaaac28b5b40, 0xaaaac28b5b90 }
+0xaaaac28b5b40->Object::~Object { 0xaaaac28b5b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: grep '^exception caught: printer$' stderr
 stdout:
+./c++.at:1064:  $PREPARSER ./input < in
 exception caught: printer
 ./c++.at:1360:  $PREPARSER ./input aaaae
 stderr:
+stderr:
 exception caught: syntax error
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+error: invalid expression
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1360:  $PREPARSER ./input aaaaE
 stderr:
+./c++.at:1064:  $PREPARSER ./input < in
 exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+error: invalid character
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1360:  $PREPARSER ./input aaaaT
 stderr:
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+======== Testing with C++ standard flags: ''
+./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 ./c++.at:1360:  $PREPARSER ./input aaaaR
 stderr:
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./c++.at:851:  $PREPARSER ./input
+stderr:
+./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+======== Testing with C++ standard flags: ''
+./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:1064:  $PREPARSER ./input < in
+./c++.at:857:  $PREPARSER ./input
 stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1064:  $PREPARSER ./input < in
+./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
-error: invalid expression
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1064:  $PREPARSER ./input < in
+stdout:
+./c++.at:573: $here/modern
+stdout:
+Modern C++: 202100
+./c++.at:573:  $PREPARSER ./list
 stderr:
-error: invalid character
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Destroy: "0"
+Destroy: "0"
+Destroy: 1
+Destroy: "1"
+Destroy: (0)
+Destroy: "2"
+Destroy: "2"
+Destroy: (0, 1)
+Destroy: ""
+Destroy: 3
+Destroy: (0, 1, 2)
+Destroy: "4"
+Destroy: "4"
+Destroy: (0, 1, 2)
+Destroy: (0, 1, 2, 4)
+Destroy: 5
+Destroy: (0, 1, 2, 4)
+Destroy: "6"
+Destroy: "6"
+Destroy: (0, 1, 2, 4)
+Destroy: (0, 1, 2, 4, 6)
+./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+673. c++.at:573:  ok
+
+686. c++.at:1361: testing Exception safety without error recovery  ...
+./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS
+stderr:
+stdout:
+./c++.at:851:  $PREPARSER ./input
+stderr:
+./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:857:  $PREPARSER ./input
+stderr:
+./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:1361: ./exceptions || exit 77
+stderr:
+Inner caught
+Outer caught
+./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc --report=all input.yy
+======== Testing with C++ standard flags: ''
+./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1065:  $PREPARSER ./input < in
@@ -252751,13 +252835,6 @@
 ./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
-./c++.at:857:  $PREPARSER ./input
-stderr:
-./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./c++.at:851:  $PREPARSER ./input
 stderr:
 ./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -252765,185 +252842,14 @@
 ./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
-stderr:
-stdout:
-./c++.at:1361:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaal
-stderr:
-exception caught: yylex
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input i
-stderr:
-exception caught: initial-action
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaap
-stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input --debug aaaap
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xaaaaf6af2b40->Object::Object { }
-Next token is token 'a' (0xaaaaf6af2b40 'a')
-Shifting token 'a' (0xaaaaf6af2b40 'a')
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaf6af2b40 'a')
--> $$ = nterm item (0xaaaaf6af2b40 'a')
-Entering state 10
-Stack now 0 10
-Reading a token
-0xaaaaf6af2b90->Object::Object { 0xaaaaf6af2b40 }
-Next token is token 'a' (0xaaaaf6af2b90 'a')
-Shifting token 'a' (0xaaaaf6af2b90 'a')
-Entering state 1
-Stack now 0 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaf6af2b90 'a')
--> $$ = nterm item (0xaaaaf6af2b90 'a')
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xaaaaf6af2be0->Object::Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90 }
-Next token is token 'a' (0xaaaaf6af2be0 'a')
-Shifting token 'a' (0xaaaaf6af2be0 'a')
-Entering state 1
-Stack now 0 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaf6af2be0 'a')
--> $$ = nterm item (0xaaaaf6af2be0 'a')
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xaaaaf6af2c30->Object::Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0 }
-Next token is token 'a' (0xaaaaf6af2c30 'a')
-Shifting token 'a' (0xaaaaf6af2c30 'a')
-Entering state 1
-Stack now 0 10 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaf6af2c30 'a')
--> $$ = nterm item (0xaaaaf6af2c30 'a')
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xaaaaf6af2c80->Object::Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0, 0xaaaaf6af2c30 }
-Next token is token 'p' (0xaaaaf6af2c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaaf6af2c80->Object::~Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0, 0xaaaaf6af2c30, 0xaaaaf6af2c80 }
-0xaaaaf6af2c30->Object::~Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0, 0xaaaaf6af2c30 }
-0xaaaaf6af2be0->Object::~Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0 }
-0xaaaaf6af2b90->Object::~Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90 }
-0xaaaaf6af2b40->Object::~Object { 0xaaaaf6af2b40 }
-exception caught: printer
-end { }
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xaaaaf6af2b40->Object::Object { }
-Next token is token 'a' (0xaaaaf6af2b40 'a')
-Shifting token 'a' (0xaaaaf6af2b40 'a')
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaf6af2b40 'a')
--> $$ = nterm item (0xaaaaf6af2b40 'a')
-Entering state 10
-Stack now 0 10
-Reading a token
-0xaaaaf6af2b90->Object::Object { 0xaaaaf6af2b40 }
-Next token is token 'a' (0xaaaaf6af2b90 'a')
-Shifting token 'a' (0xaaaaf6af2b90 'a')
-Entering state 1
-Stack now 0 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaf6af2b90 'a')
--> $$ = nterm item (0xaaaaf6af2b90 'a')
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xaaaaf6af2be0->Object::Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90 }
-Next token is token 'a' (0xaaaaf6af2be0 'a')
-Shifting token 'a' (0xaaaaf6af2be0 'a')
-Entering state 1
-Stack now 0 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaf6af2be0 'a')
--> $$ = nterm item (0xaaaaf6af2be0 'a')
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xaaaaf6af2c30->Object::Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0 }
-Next token is token 'a' (0xaaaaf6af2c30 'a')
-Shifting token 'a' (0xaaaaf6af2c30 'a')
-Entering state 1
-Stack now 0 10 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaf6af2c30 'a')
--> $$ = nterm item (0xaaaaf6af2c30 'a')
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xaaaaf6af2c80->Object::Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0, 0xaaaaf6af2c30 }
-Next token is token 'p' (0xaaaaf6af2c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaaf6af2c80->Object::~Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0, 0xaaaaf6af2c30, 0xaaaaf6af2c80 }
-0xaaaaf6af2c30->Object::~Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0, 0xaaaaf6af2c30 }
-0xaaaaf6af2be0->Object::~Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90, 0xaaaaf6af2be0 }
-0xaaaaf6af2b90->Object::~Object { 0xaaaaf6af2b40, 0xaaaaf6af2b90 }
-0xaaaaf6af2b40->Object::~Object { 0xaaaaf6af2b40 }
-exception caught: printer
-end { }
-./c++.at:1361: grep '^exception caught: printer$' stderr
-stdout:
-exception caught: printer
-./c++.at:1361:  $PREPARSER ./input aaaae
-stderr:
-exception caught: syntax error
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaaE
-stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaaT
-stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaaR
-stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./c++.at:1360:  $PREPARSER ./input aaaas
 stderr:
-stderr:
-stdout:
-stderr:
-./c++.at:851:  $PREPARSER ./input
 exception caught: reduction
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stdout:
-./c++.at:857:  $PREPARSER ./input
 ./c++.at:1360:  $PREPARSER ./input aaaal
 stderr:
-stderr:
-stderr:
-./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 exception caught: yylex
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-======== Testing with C++ standard flags: ''
-./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:1360:  $PREPARSER ./input i
 stderr:
 exception caught: initial-action
@@ -252957,57 +252863,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaafbe66b40->Object::Object { }
-Next token is token 'a' (0xaaaafbe66b40 'a')
-Shifting token 'a' (0xaaaafbe66b40 'a')
+0xaaaae149ab40->Object::Object { }
+Next token is token 'a' (0xaaaae149ab40 'a')
+Shifting token 'a' (0xaaaae149ab40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaafbe66b40 'a')
--> $$ = nterm item (0xaaaafbe66b40 'a')
+   $1 = token 'a' (0xaaaae149ab40 'a')
+-> $$ = nterm item (0xaaaae149ab40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaafbe66b90->Object::Object { 0xaaaafbe66b40 }
-Next token is token 'a' (0xaaaafbe66b90 'a')
-Shifting token 'a' (0xaaaafbe66b90 'a')
+0xaaaae149ab90->Object::Object { 0xaaaae149ab40 }
+Next token is token 'a' (0xaaaae149ab90 'a')
+Shifting token 'a' (0xaaaae149ab90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaafbe66b90 'a')
--> $$ = nterm item (0xaaaafbe66b90 'a')
+   $1 = token 'a' (0xaaaae149ab90 'a')
+-> $$ = nterm item (0xaaaae149ab90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaafbe66be0->Object::Object { 0xaaaafbe66b40, 0xaaaafbe66b90 }
-Next token is token 'a' (0xaaaafbe66be0 'a')
-Shifting token 'a' (0xaaaafbe66be0 'a')
+0xaaaae149abe0->Object::Object { 0xaaaae149ab40, 0xaaaae149ab90 }
+Next token is token 'a' (0xaaaae149abe0 'a')
+Shifting token 'a' (0xaaaae149abe0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaafbe66be0 'a')
--> $$ = nterm item (0xaaaafbe66be0 'a')
+   $1 = token 'a' (0xaaaae149abe0 'a')
+-> $$ = nterm item (0xaaaae149abe0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaafbe66c30->Object::Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0 }
-Next token is token 'a' (0xaaaafbe66c30 'a')
-Shifting token 'a' (0xaaaafbe66c30 'a')
+0xaaaae149ac30->Object::Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0 }
+Next token is token 'a' (0xaaaae149ac30 'a')
+Shifting token 'a' (0xaaaae149ac30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaafbe66c30 'a')
--> $$ = nterm item (0xaaaafbe66c30 'a')
+   $1 = token 'a' (0xaaaae149ac30 'a')
+-> $$ = nterm item (0xaaaae149ac30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaafbe66c80->Object::Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0, 0xaaaafbe66c30 }
-Next token is token 'p' (0xaaaafbe66c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaafbe66c80->Object::~Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0, 0xaaaafbe66c30, 0xaaaafbe66c80 }
-0xaaaafbe66c30->Object::~Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0, 0xaaaafbe66c30 }
-0xaaaafbe66be0->Object::~Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0 }
-0xaaaafbe66b90->Object::~Object { 0xaaaafbe66b40, 0xaaaafbe66b90 }
-0xaaaafbe66b40->Object::~Object { 0xaaaafbe66b40 }
+0xaaaae149ac80->Object::Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0, 0xaaaae149ac30 }
+Next token is token 'p' (0xaaaae149ac80 'p'Exception caught: cleaning lookahead and stack
+0xaaaae149ac80->Object::~Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0, 0xaaaae149ac30, 0xaaaae149ac80 }
+0xaaaae149ac30->Object::~Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0, 0xaaaae149ac30 }
+0xaaaae149abe0->Object::~Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0 }
+0xaaaae149ab90->Object::~Object { 0xaaaae149ab40, 0xaaaae149ab90 }
+0xaaaae149ab40->Object::~Object { 0xaaaae149ab40 }
 exception caught: printer
 end { }
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -253016,57 +252922,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaafbe66b40->Object::Object { }
-Next token is token 'a' (0xaaaafbe66b40 'a')
-Shifting token 'a' (0xaaaafbe66b40 'a')
+0xaaaae149ab40->Object::Object { }
+Next token is token 'a' (0xaaaae149ab40 'a')
+Shifting token 'a' (0xaaaae149ab40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaafbe66b40 'a')
--> $$ = nterm item (0xaaaafbe66b40 'a')
+   $1 = token 'a' (0xaaaae149ab40 'a')
+-> $$ = nterm item (0xaaaae149ab40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaafbe66b90->Object::Object { 0xaaaafbe66b40 }
-Next token is token 'a' (0xaaaafbe66b90 'a')
-Shifting token 'a' (0xaaaafbe66b90 'a')
+0xaaaae149ab90->Object::Object { 0xaaaae149ab40 }
+Next token is token 'a' (0xaaaae149ab90 'a')
+Shifting token 'a' (0xaaaae149ab90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaafbe66b90 'a')
--> $$ = nterm item (0xaaaafbe66b90 'a')
+   $1 = token 'a' (0xaaaae149ab90 'a')
+-> $$ = nterm item (0xaaaae149ab90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaafbe66be0->Object::Object { 0xaaaafbe66b40, 0xaaaafbe66b90 }
-Next token is token 'a' (0xaaaafbe66be0 'a')
-Shifting token 'a' (0xaaaafbe66be0 'a')
+0xaaaae149abe0->Object::Object { 0xaaaae149ab40, 0xaaaae149ab90 }
+Next token is token 'a' (0xaaaae149abe0 'a')
+Shifting token 'a' (0xaaaae149abe0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaafbe66be0 'a')
--> $$ = nterm item (0xaaaafbe66be0 'a')
+   $1 = token 'a' (0xaaaae149abe0 'a')
+-> $$ = nterm item (0xaaaae149abe0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaafbe66c30->Object::Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0 }
-Next token is token 'a' (0xaaaafbe66c30 'a')
-Shifting token 'a' (0xaaaafbe66c30 'a')
+0xaaaae149ac30->Object::Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0 }
+Next token is token 'a' (0xaaaae149ac30 'a')
+Shifting token 'a' (0xaaaae149ac30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaafbe66c30 'a')
--> $$ = nterm item (0xaaaafbe66c30 'a')
+   $1 = token 'a' (0xaaaae149ac30 'a')
+-> $$ = nterm item (0xaaaae149ac30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaafbe66c80->Object::Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0, 0xaaaafbe66c30 }
-Next token is token 'p' (0xaaaafbe66c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaafbe66c80->Object::~Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0, 0xaaaafbe66c30, 0xaaaafbe66c80 }
-0xaaaafbe66c30->Object::~Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0, 0xaaaafbe66c30 }
-0xaaaafbe66be0->Object::~Object { 0xaaaafbe66b40, 0xaaaafbe66b90, 0xaaaafbe66be0 }
-0xaaaafbe66b90->Object::~Object { 0xaaaafbe66b40, 0xaaaafbe66b90 }
-0xaaaafbe66b40->Object::~Object { 0xaaaafbe66b40 }
+0xaaaae149ac80->Object::Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0, 0xaaaae149ac30 }
+Next token is token 'p' (0xaaaae149ac80 'p'Exception caught: cleaning lookahead and stack
+0xaaaae149ac80->Object::~Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0, 0xaaaae149ac30, 0xaaaae149ac80 }
+0xaaaae149ac30->Object::~Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0, 0xaaaae149ac30 }
+0xaaaae149abe0->Object::~Object { 0xaaaae149ab40, 0xaaaae149ab90, 0xaaaae149abe0 }
+0xaaaae149ab90->Object::~Object { 0xaaaae149ab40, 0xaaaae149ab90 }
+0xaaaae149ab40->Object::~Object { 0xaaaae149ab40 }
 exception caught: printer
 end { }
 ./c++.at:1360: grep '^exception caught: printer$' stderr
@@ -253090,98 +252996,30 @@
 ./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:574: $here/modern
-stdout:
-Modern C++: 202100
-./c++.at:574:  $PREPARSER ./list
-stderr:
-Destroy: ""
-Destroy: ""
-Destroy: 1
-Destroy: ""
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: ""
-Destroy: 3
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: ()
-Destroy: 5
-Destroy: ()
-Destroy: ""
-Destroy: ""
-Destroy: ()
-Destroy: (0, 1, 2, 4, 6)
-./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-674. c++.at:574:  ok
-
-687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ...
-./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS
-stderr:
-stdout:
-stderr:
-./c++.at:1065:  $PREPARSER ./input < in
-stdout:
-./c++.at:1362: ./exceptions || exit 77
-stderr:
-stderr:
-Inner caught
-Outer caught
-stdout:
-stderr:
 ./c++.at:1064:  $PREPARSER ./input < in
-./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc --report=all input.yy
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 error: invalid expression
 caught error
 error: invalid character
 caught error
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
 ./c++.at:1064:  $PREPARSER ./input < in
 stderr:
-error: invalid expression
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
 stderr:
-./c++.at:1064:  $PREPARSER ./input < in
 error: invalid expression
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-error: invalid character
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
-======== Testing with C++ standard flags: ''
-stderr:
-./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
-error: invalid character
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
-======== Testing with C++ standard flags: ''
-./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./c++.at:857:  $PREPARSER ./input
 stderr:
 ./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1064:  $PREPARSER ./input < in
 ======== Testing with C++ standard flags: ''
-./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:851:  $PREPARSER ./input
 stderr:
-./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+error: invalid character
+./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1066:  $PREPARSER ./input < in
@@ -253207,6 +253045,56 @@
 ./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
+./c++.at:574: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS
+stderr:
+stdout:
+./c++.at:851:  $PREPARSER ./input
+stderr:
+./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:574: $here/modern
+stdout:
+Modern C++: 202100
+./c++.at:574:  $PREPARSER ./list
+stderr:
+Destroy: ""
+Destroy: ""
+Destroy: 1
+Destroy: ""
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: ""
+Destroy: 3
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: ()
+Destroy: 5
+Destroy: ()
+Destroy: ""
+Destroy: ""
+Destroy: ()
+Destroy: (0, 1, 2, 4, 6)
+stderr:
+./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stdout:
+./c++.at:857:  $PREPARSER ./input
+stderr:
+./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+674. c++.at:574:  ok
+======== Testing with C++ standard flags: ''
+./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+
+687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ...
+./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS
+stderr:
+stdout:
 ./c++.at:1361:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
@@ -253228,57 +253116,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab1c0b3b40->Object::Object { }
-Next token is token 'a' (0xaaab1c0b3b40 'a')
-Shifting token 'a' (0xaaab1c0b3b40 'a')
+0xaaaafcd0eb40->Object::Object { }
+Next token is token 'a' (0xaaaafcd0eb40 'a')
+Shifting token 'a' (0xaaaafcd0eb40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1c0b3b40 'a')
--> $$ = nterm item (0xaaab1c0b3b40 'a')
+   $1 = token 'a' (0xaaaafcd0eb40 'a')
+-> $$ = nterm item (0xaaaafcd0eb40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaab1c0b3b90->Object::Object { 0xaaab1c0b3b40 }
-Next token is token 'a' (0xaaab1c0b3b90 'a')
-Shifting token 'a' (0xaaab1c0b3b90 'a')
+0xaaaafcd0eb90->Object::Object { 0xaaaafcd0eb40 }
+Next token is token 'a' (0xaaaafcd0eb90 'a')
+Shifting token 'a' (0xaaaafcd0eb90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1c0b3b90 'a')
--> $$ = nterm item (0xaaab1c0b3b90 'a')
+   $1 = token 'a' (0xaaaafcd0eb90 'a')
+-> $$ = nterm item (0xaaaafcd0eb90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaab1c0b3be0->Object::Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90 }
-Next token is token 'a' (0xaaab1c0b3be0 'a')
-Shifting token 'a' (0xaaab1c0b3be0 'a')
+0xaaaafcd0ebe0->Object::Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90 }
+Next token is token 'a' (0xaaaafcd0ebe0 'a')
+Shifting token 'a' (0xaaaafcd0ebe0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1c0b3be0 'a')
--> $$ = nterm item (0xaaab1c0b3be0 'a')
+   $1 = token 'a' (0xaaaafcd0ebe0 'a')
+-> $$ = nterm item (0xaaaafcd0ebe0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaab1c0b3c30->Object::Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0 }
-Next token is token 'a' (0xaaab1c0b3c30 'a')
-Shifting token 'a' (0xaaab1c0b3c30 'a')
+0xaaaafcd0ec30->Object::Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0 }
+Next token is token 'a' (0xaaaafcd0ec30 'a')
+Shifting token 'a' (0xaaaafcd0ec30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1c0b3c30 'a')
--> $$ = nterm item (0xaaab1c0b3c30 'a')
+   $1 = token 'a' (0xaaaafcd0ec30 'a')
+-> $$ = nterm item (0xaaaafcd0ec30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaab1c0b3c80->Object::Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0, 0xaaab1c0b3c30 }
-Next token is token 'p' (0xaaab1c0b3c80 'p'Exception caught: cleaning lookahead and stack
-0xaaab1c0b3c80->Object::~Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0, 0xaaab1c0b3c30, 0xaaab1c0b3c80 }
-0xaaab1c0b3c30->Object::~Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0, 0xaaab1c0b3c30 }
-0xaaab1c0b3be0->Object::~Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0 }
-0xaaab1c0b3b90->Object::~Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90 }
-0xaaab1c0b3b40->Object::~Object { 0xaaab1c0b3b40 }
+0xaaaafcd0ec80->Object::Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0, 0xaaaafcd0ec30 }
+Next token is token 'p' (0xaaaafcd0ec80 'p'Exception caught: cleaning lookahead and stack
+0xaaaafcd0ec80->Object::~Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0, 0xaaaafcd0ec30, 0xaaaafcd0ec80 }
+0xaaaafcd0ec30->Object::~Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0, 0xaaaafcd0ec30 }
+0xaaaafcd0ebe0->Object::~Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0 }
+0xaaaafcd0eb90->Object::~Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90 }
+0xaaaafcd0eb40->Object::~Object { 0xaaaafcd0eb40 }
 exception caught: printer
 end { }
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -253287,57 +253175,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab1c0b3b40->Object::Object { }
-Next token is token 'a' (0xaaab1c0b3b40 'a')
-Shifting token 'a' (0xaaab1c0b3b40 'a')
+0xaaaafcd0eb40->Object::Object { }
+Next token is token 'a' (0xaaaafcd0eb40 'a')
+Shifting token 'a' (0xaaaafcd0eb40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1c0b3b40 'a')
--> $$ = nterm item (0xaaab1c0b3b40 'a')
+   $1 = token 'a' (0xaaaafcd0eb40 'a')
+-> $$ = nterm item (0xaaaafcd0eb40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaab1c0b3b90->Object::Object { 0xaaab1c0b3b40 }
-Next token is token 'a' (0xaaab1c0b3b90 'a')
-Shifting token 'a' (0xaaab1c0b3b90 'a')
+0xaaaafcd0eb90->Object::Object { 0xaaaafcd0eb40 }
+Next token is token 'a' (0xaaaafcd0eb90 'a')
+Shifting token 'a' (0xaaaafcd0eb90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1c0b3b90 'a')
--> $$ = nterm item (0xaaab1c0b3b90 'a')
+   $1 = token 'a' (0xaaaafcd0eb90 'a')
+-> $$ = nterm item (0xaaaafcd0eb90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaab1c0b3be0->Object::Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90 }
-Next token is token 'a' (0xaaab1c0b3be0 'a')
-Shifting token 'a' (0xaaab1c0b3be0 'a')
+0xaaaafcd0ebe0->Object::Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90 }
+Next token is token 'a' (0xaaaafcd0ebe0 'a')
+Shifting token 'a' (0xaaaafcd0ebe0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1c0b3be0 'a')
--> $$ = nterm item (0xaaab1c0b3be0 'a')
+   $1 = token 'a' (0xaaaafcd0ebe0 'a')
+-> $$ = nterm item (0xaaaafcd0ebe0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaab1c0b3c30->Object::Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0 }
-Next token is token 'a' (0xaaab1c0b3c30 'a')
-Shifting token 'a' (0xaaab1c0b3c30 'a')
+0xaaaafcd0ec30->Object::Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0 }
+Next token is token 'a' (0xaaaafcd0ec30 'a')
+Shifting token 'a' (0xaaaafcd0ec30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1c0b3c30 'a')
--> $$ = nterm item (0xaaab1c0b3c30 'a')
+   $1 = token 'a' (0xaaaafcd0ec30 'a')
+-> $$ = nterm item (0xaaaafcd0ec30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaab1c0b3c80->Object::Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0, 0xaaab1c0b3c30 }
-Next token is token 'p' (0xaaab1c0b3c80 'p'Exception caught: cleaning lookahead and stack
-0xaaab1c0b3c80->Object::~Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0, 0xaaab1c0b3c30, 0xaaab1c0b3c80 }
-0xaaab1c0b3c30->Object::~Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0, 0xaaab1c0b3c30 }
-0xaaab1c0b3be0->Object::~Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90, 0xaaab1c0b3be0 }
-0xaaab1c0b3b90->Object::~Object { 0xaaab1c0b3b40, 0xaaab1c0b3b90 }
-0xaaab1c0b3b40->Object::~Object { 0xaaab1c0b3b40 }
+0xaaaafcd0ec80->Object::Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0, 0xaaaafcd0ec30 }
+Next token is token 'p' (0xaaaafcd0ec80 'p'Exception caught: cleaning lookahead and stack
+0xaaaafcd0ec80->Object::~Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0, 0xaaaafcd0ec30, 0xaaaafcd0ec80 }
+0xaaaafcd0ec30->Object::~Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0, 0xaaaafcd0ec30 }
+0xaaaafcd0ebe0->Object::~Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90, 0xaaaafcd0ebe0 }
+0xaaaafcd0eb90->Object::~Object { 0xaaaafcd0eb40, 0xaaaafcd0eb90 }
+0xaaaafcd0eb40->Object::~Object { 0xaaaafcd0eb40 }
 exception caught: printer
 end { }
 ./c++.at:1361: grep '^exception caught: printer$' stderr
@@ -253361,6 +253249,41 @@
 ./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid character
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+stderr:
+stdout:
+./c++.at:1362: ./exceptions || exit 77
+stderr:
+Inner caught
+Outer caught
+./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc --report=all input.yy
+stderr:
+stdout:
+./c++.at:851:  $PREPARSER ./input
+stderr:
+./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+======== Testing with C++ standard flags: ''
+./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
 ./c++.at:1360:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
@@ -253382,57 +253305,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaaea841b40->Object::Object { }
-Next token is token 'a' (0xaaaaea841b40 'a')
-Shifting token 'a' (0xaaaaea841b40 'a')
+0xaaaaf9a44b40->Object::Object { }
+Next token is token 'a' (0xaaaaf9a44b40 'a')
+Shifting token 'a' (0xaaaaf9a44b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea841b40 'a')
--> $$ = nterm item (0xaaaaea841b40 'a')
+   $1 = token 'a' (0xaaaaf9a44b40 'a')
+-> $$ = nterm item (0xaaaaf9a44b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaaea841b90->Object::Object { 0xaaaaea841b40 }
-Next token is token 'a' (0xaaaaea841b90 'a')
-Shifting token 'a' (0xaaaaea841b90 'a')
+0xaaaaf9a44b90->Object::Object { 0xaaaaf9a44b40 }
+Next token is token 'a' (0xaaaaf9a44b90 'a')
+Shifting token 'a' (0xaaaaf9a44b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea841b90 'a')
--> $$ = nterm item (0xaaaaea841b90 'a')
+   $1 = token 'a' (0xaaaaf9a44b90 'a')
+-> $$ = nterm item (0xaaaaf9a44b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaaea841be0->Object::Object { 0xaaaaea841b40, 0xaaaaea841b90 }
-Next token is token 'a' (0xaaaaea841be0 'a')
-Shifting token 'a' (0xaaaaea841be0 'a')
+0xaaaaf9a44be0->Object::Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90 }
+Next token is token 'a' (0xaaaaf9a44be0 'a')
+Shifting token 'a' (0xaaaaf9a44be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea841be0 'a')
--> $$ = nterm item (0xaaaaea841be0 'a')
+   $1 = token 'a' (0xaaaaf9a44be0 'a')
+-> $$ = nterm item (0xaaaaf9a44be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaaea841c30->Object::Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0 }
-Next token is token 'a' (0xaaaaea841c30 'a')
-Shifting token 'a' (0xaaaaea841c30 'a')
+0xaaaaf9a44c30->Object::Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0 }
+Next token is token 'a' (0xaaaaf9a44c30 'a')
+Shifting token 'a' (0xaaaaf9a44c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea841c30 'a')
--> $$ = nterm item (0xaaaaea841c30 'a')
+   $1 = token 'a' (0xaaaaf9a44c30 'a')
+-> $$ = nterm item (0xaaaaf9a44c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaaea841c80->Object::Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0, 0xaaaaea841c30 }
-Next token is token 'p' (0xaaaaea841c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaaea841c80->Object::~Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0, 0xaaaaea841c30, 0xaaaaea841c80 }
-0xaaaaea841c30->Object::~Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0, 0xaaaaea841c30 }
-0xaaaaea841be0->Object::~Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0 }
-0xaaaaea841b90->Object::~Object { 0xaaaaea841b40, 0xaaaaea841b90 }
-0xaaaaea841b40->Object::~Object { 0xaaaaea841b40 }
+0xaaaaf9a44c80->Object::Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0, 0xaaaaf9a44c30 }
+Next token is token 'p' (0xaaaaf9a44c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaaf9a44c80->Object::~Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0, 0xaaaaf9a44c30, 0xaaaaf9a44c80 }
+0xaaaaf9a44c30->Object::~Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0, 0xaaaaf9a44c30 }
+0xaaaaf9a44be0->Object::~Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0 }
+0xaaaaf9a44b90->Object::~Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90 }
+0xaaaaf9a44b40->Object::~Object { 0xaaaaf9a44b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -253441,57 +253364,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaaea841b40->Object::Object { }
-Next token is token 'a' (0xaaaaea841b40 'a')
-Shifting token 'a' (0xaaaaea841b40 'a')
+0xaaaaf9a44b40->Object::Object { }
+Next token is token 'a' (0xaaaaf9a44b40 'a')
+Shifting token 'a' (0xaaaaf9a44b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea841b40 'a')
--> $$ = nterm item (0xaaaaea841b40 'a')
+   $1 = token 'a' (0xaaaaf9a44b40 'a')
+-> $$ = nterm item (0xaaaaf9a44b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaaea841b90->Object::Object { 0xaaaaea841b40 }
-Next token is token 'a' (0xaaaaea841b90 'a')
-Shifting token 'a' (0xaaaaea841b90 'a')
+0xaaaaf9a44b90->Object::Object { 0xaaaaf9a44b40 }
+Next token is token 'a' (0xaaaaf9a44b90 'a')
+Shifting token 'a' (0xaaaaf9a44b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea841b90 'a')
--> $$ = nterm item (0xaaaaea841b90 'a')
+   $1 = token 'a' (0xaaaaf9a44b90 'a')
+-> $$ = nterm item (0xaaaaf9a44b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaaea841be0->Object::Object { 0xaaaaea841b40, 0xaaaaea841b90 }
-Next token is token 'a' (0xaaaaea841be0 'a')
-Shifting token 'a' (0xaaaaea841be0 'a')
+0xaaaaf9a44be0->Object::Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90 }
+Next token is token 'a' (0xaaaaf9a44be0 'a')
+Shifting token 'a' (0xaaaaf9a44be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea841be0 'a')
--> $$ = nterm item (0xaaaaea841be0 'a')
+   $1 = token 'a' (0xaaaaf9a44be0 'a')
+-> $$ = nterm item (0xaaaaf9a44be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaaea841c30->Object::Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0 }
-Next token is token 'a' (0xaaaaea841c30 'a')
-Shifting token 'a' (0xaaaaea841c30 'a')
+0xaaaaf9a44c30->Object::Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0 }
+Next token is token 'a' (0xaaaaf9a44c30 'a')
+Shifting token 'a' (0xaaaaf9a44c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea841c30 'a')
--> $$ = nterm item (0xaaaaea841c30 'a')
+   $1 = token 'a' (0xaaaaf9a44c30 'a')
+-> $$ = nterm item (0xaaaaf9a44c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaaea841c80->Object::Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0, 0xaaaaea841c30 }
-Next token is token 'p' (0xaaaaea841c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaaea841c80->Object::~Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0, 0xaaaaea841c30, 0xaaaaea841c80 }
-0xaaaaea841c30->Object::~Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0, 0xaaaaea841c30 }
-0xaaaaea841be0->Object::~Object { 0xaaaaea841b40, 0xaaaaea841b90, 0xaaaaea841be0 }
-0xaaaaea841b90->Object::~Object { 0xaaaaea841b40, 0xaaaaea841b90 }
-0xaaaaea841b40->Object::~Object { 0xaaaaea841b40 }
+0xaaaaf9a44c80->Object::Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0, 0xaaaaf9a44c30 }
+Next token is token 'p' (0xaaaaf9a44c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaaf9a44c80->Object::~Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0, 0xaaaaf9a44c30, 0xaaaaf9a44c80 }
+0xaaaaf9a44c30->Object::~Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0, 0xaaaaf9a44c30 }
+0xaaaaf9a44be0->Object::~Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90, 0xaaaaf9a44be0 }
+0xaaaaf9a44b90->Object::~Object { 0xaaaaf9a44b40, 0xaaaaf9a44b90 }
+0xaaaaf9a44b40->Object::~Object { 0xaaaaf9a44b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: grep '^exception caught: printer$' stderr
@@ -253501,332 +253424,61 @@
 stderr:
 exception caught: syntax error
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1360:  $PREPARSER ./input aaaaE
-stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
-./c++.at:857:  $PREPARSER ./input
-./c++.at:1360:  $PREPARSER ./input aaaaT
-stderr:
-./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./c++.at:1360:  $PREPARSER ./input aaaaR
-stderr:
-./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:851:  $PREPARSER ./input
-stderr:
-./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid character
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1064:  $PREPARSER ./input < in
+./c++.at:1360:  $PREPARSER ./input aaaaE
+stderr:
 stderr:
 error: invalid expression
 caught error
 error: invalid character
 caught error
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1064:  $PREPARSER ./input < in
+./c++.at:1360:  $PREPARSER ./input aaaaT
 stderr:
 error: invalid expression
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1064:  $PREPARSER ./input < in
+./c++.at:1360:  $PREPARSER ./input aaaaR
 stderr:
+stderr:
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 error: invalid character
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
+./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+======== Testing with C++ standard flags: ''
 ./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
-./c++.at:1362:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaal
-stderr:
-exception caught: yylex
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
 ./c++.at:857:  $PREPARSER ./input
-./c++.at:1362:  $PREPARSER ./input i
-stderr:
 stderr:
-exception caught: initial-action
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1362:  $PREPARSER ./input aaaap
 ./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input --debug aaaap
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xffffd9920680->Object::Object { }
-0xffffd9920750->Object::Object { 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'a' (0xffffd9920750 'a')
-0xffffd9920670->Object::Object { 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xffffd9920670, 0xffffd9920750 }
-Shifting token 'a' (0xffffd9920670 'a')
-0xaaab0c914ee0->Object::Object { 0xffffd9920670 }
-0xffffd9920670->Object::~Object { 0xaaab0c914ee0, 0xffffd9920670 }
-Entering state 2
-Stack now 0 2
-0xffffd9920770->Object::Object { 0xaaab0c914ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab0c914ee0 'a')
--> $$ = nterm item (0xffffd9920770 'a')
-0xaaab0c914ee0->Object::~Object { 0xaaab0c914ee0, 0xffffd9920770 }
-0xaaab0c914ee0->Object::Object { 0xffffd9920770 }
-0xffffd9920770->Object::~Object { 0xaaab0c914ee0, 0xffffd9920770 }
-Entering state 11
-Stack now 0 11
-Reading a token
-0xffffd9920680->Object::Object { 0xaaab0c914ee0 }
-0xffffd9920750->Object::Object { 0xaaab0c914ee0, 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xaaab0c914ee0, 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'a' (0xffffd9920750 'a')
-0xffffd9920670->Object::Object { 0xaaab0c914ee0, 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xaaab0c914ee0, 0xffffd9920670, 0xffffd9920750 }
-Shifting token 'a' (0xffffd9920670 'a')
-0xaaab0c914f00->Object::Object { 0xaaab0c914ee0, 0xffffd9920670 }
-0xffffd9920670->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920670 }
-Entering state 2
-Stack now 0 11 2
-0xffffd9920770->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab0c914f00 'a')
--> $$ = nterm item (0xffffd9920770 'a')
-0xaaab0c914f00->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920770 }
-0xaaab0c914f00->Object::Object { 0xaaab0c914ee0, 0xffffd9920770 }
-0xffffd9920770->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920770 }
-Entering state 11
-Stack now 0 11 11
-Reading a token
-0xffffd9920680->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00 }
-0xffffd9920750->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'a' (0xffffd9920750 'a')
-0xffffd9920670->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920670, 0xffffd9920750 }
-Shifting token 'a' (0xffffd9920670 'a')
-0xaaab0c914f20->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920670 }
-0xffffd9920670->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920670 }
-Entering state 2
-Stack now 0 11 11 2
-0xffffd9920770->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab0c914f20 'a')
--> $$ = nterm item (0xffffd9920770 'a')
-0xaaab0c914f20->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920770 }
-0xaaab0c914f20->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920770 }
-0xffffd9920770->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920770 }
-Entering state 11
-Stack now 0 11 11 11
-Reading a token
-0xffffd9920680->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20 }
-0xffffd9920750->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'a' (0xffffd9920750 'a')
-0xffffd9920670->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920670, 0xffffd9920750 }
-Shifting token 'a' (0xffffd9920670 'a')
-0xaaab0c914f40->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920670 }
-0xffffd9920670->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920670 }
-Entering state 2
-Stack now 0 11 11 11 2
-0xffffd9920770->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab0c914f40 'a')
--> $$ = nterm item (0xffffd9920770 'a')
-0xaaab0c914f40->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920770 }
-0xaaab0c914f40->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920770 }
-0xffffd9920770->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920770 }
-Entering state 11
-Stack now 0 11 11 11 11
-Reading a token
-0xffffd9920680->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40 }
-0xffffd9920750->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'p' (0xffffd9920750 'p'Exception caught: cleaning lookahead and stack
-0xaaab0c914f40->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920750 }
-0xaaab0c914f20->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920750 }
-0xaaab0c914f00->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920750 }
-0xaaab0c914ee0->Object::~Object { 0xaaab0c914ee0, 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xffffd9920750 }
-exception caught: printer
-end { }
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xffffd9920680->Object::Object { }
-0xffffd9920750->Object::Object { 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'a' (0xffffd9920750 'a')
-0xffffd9920670->Object::Object { 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xffffd9920670, 0xffffd9920750 }
-Shifting token 'a' (0xffffd9920670 'a')
-0xaaab0c914ee0->Object::Object { 0xffffd9920670 }
-0xffffd9920670->Object::~Object { 0xaaab0c914ee0, 0xffffd9920670 }
-Entering state 2
-Stack now 0 2
-0xffffd9920770->Object::Object { 0xaaab0c914ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab0c914ee0 'a')
--> $$ = nterm item (0xffffd9920770 'a')
-0xaaab0c914ee0->Object::~Object { 0xaaab0c914ee0, 0xffffd9920770 }
-0xaaab0c914ee0->Object::Object { 0xffffd9920770 }
-0xffffd9920770->Object::~Object { 0xaaab0c914ee0, 0xffffd9920770 }
-Entering state 11
-Stack now 0 11
-Reading a token
-0xffffd9920680->Object::Object { 0xaaab0c914ee0 }
-0xffffd9920750->Object::Object { 0xaaab0c914ee0, 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xaaab0c914ee0, 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'a' (0xffffd9920750 'a')
-0xffffd9920670->Object::Object { 0xaaab0c914ee0, 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xaaab0c914ee0, 0xffffd9920670, 0xffffd9920750 }
-Shifting token 'a' (0xffffd9920670 'a')
-0xaaab0c914f00->Object::Object { 0xaaab0c914ee0, 0xffffd9920670 }
-0xffffd9920670->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920670 }
-Entering state 2
-Stack now 0 11 2
-0xffffd9920770->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab0c914f00 'a')
--> $$ = nterm item (0xffffd9920770 'a')
-0xaaab0c914f00->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920770 }
-0xaaab0c914f00->Object::Object { 0xaaab0c914ee0, 0xffffd9920770 }
-0xffffd9920770->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920770 }
-Entering state 11
-Stack now 0 11 11
-Reading a token
-0xffffd9920680->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00 }
-0xffffd9920750->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'a' (0xffffd9920750 'a')
-0xffffd9920670->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920670, 0xffffd9920750 }
-Shifting token 'a' (0xffffd9920670 'a')
-0xaaab0c914f20->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920670 }
-0xffffd9920670->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920670 }
-Entering state 2
-Stack now 0 11 11 2
-0xffffd9920770->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab0c914f20 'a')
--> $$ = nterm item (0xffffd9920770 'a')
-0xaaab0c914f20->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920770 }
-0xaaab0c914f20->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920770 }
-0xffffd9920770->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920770 }
-Entering state 11
-Stack now 0 11 11 11
-Reading a token
-0xffffd9920680->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20 }
-0xffffd9920750->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'a' (0xffffd9920750 'a')
-0xffffd9920670->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920670, 0xffffd9920750 }
-Shifting token 'a' (0xffffd9920670 'a')
-0xaaab0c914f40->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920670 }
-0xffffd9920670->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920670 }
-Entering state 2
-Stack now 0 11 11 11 2
-0xffffd9920770->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab0c914f40 'a')
--> $$ = nterm item (0xffffd9920770 'a')
-0xaaab0c914f40->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920770 }
-0xaaab0c914f40->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920770 }
-0xffffd9920770->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920770 }
-Entering state 11
-Stack now 0 11 11 11 11
-Reading a token
-0xffffd9920680->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40 }
-0xffffd9920750->Object::Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920680 }
-0xffffd9920680->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920680, 0xffffd9920750 }
-Next token is token 'p' (0xffffd9920750 'p'Exception caught: cleaning lookahead and stack
-0xaaab0c914f40->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xaaab0c914f40, 0xffffd9920750 }
-0xaaab0c914f20->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xaaab0c914f20, 0xffffd9920750 }
-0xaaab0c914f00->Object::~Object { 0xaaab0c914ee0, 0xaaab0c914f00, 0xffffd9920750 }
-0xaaab0c914ee0->Object::~Object { 0xaaab0c914ee0, 0xffffd9920750 }
-0xffffd9920750->Object::~Object { 0xffffd9920750 }
-exception caught: printer
-end { }
-./c++.at:1362: grep '^exception caught: printer$' stderr
 stdout:
-exception caught: printer
-./c++.at:1362:  $PREPARSER ./input aaaae
-stderr:
-exception caught: syntax error
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaE
+./c++.at:1361:  $PREPARSER ./input aaaas
 stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+exception caught: reduction
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./c++.at:1362:  $PREPARSER ./input aaaaT
 stdout:
+./c++.at:1361:  $PREPARSER ./input aaaal
 ./c++.at:851:  $PREPARSER ./input
 stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaR
 stderr:
 ./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1361:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaal
-stderr:
 exception caught: yylex
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:1361:  $PREPARSER ./input i
 stderr:
 exception caught: initial-action
@@ -253840,57 +253492,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaaea8f2b40->Object::Object { }
-Next token is token 'a' (0xaaaaea8f2b40 'a')
-Shifting token 'a' (0xaaaaea8f2b40 'a')
+0xaaaaeb1beb40->Object::Object { }
+Next token is token 'a' (0xaaaaeb1beb40 'a')
+Shifting token 'a' (0xaaaaeb1beb40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea8f2b40 'a')
--> $$ = nterm item (0xaaaaea8f2b40 'a')
+   $1 = token 'a' (0xaaaaeb1beb40 'a')
+-> $$ = nterm item (0xaaaaeb1beb40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaaaea8f2b90->Object::Object { 0xaaaaea8f2b40 }
-Next token is token 'a' (0xaaaaea8f2b90 'a')
-Shifting token 'a' (0xaaaaea8f2b90 'a')
+0xaaaaeb1beb90->Object::Object { 0xaaaaeb1beb40 }
+Next token is token 'a' (0xaaaaeb1beb90 'a')
+Shifting token 'a' (0xaaaaeb1beb90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea8f2b90 'a')
--> $$ = nterm item (0xaaaaea8f2b90 'a')
+   $1 = token 'a' (0xaaaaeb1beb90 'a')
+-> $$ = nterm item (0xaaaaeb1beb90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaaaea8f2be0->Object::Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90 }
-Next token is token 'a' (0xaaaaea8f2be0 'a')
-Shifting token 'a' (0xaaaaea8f2be0 'a')
+0xaaaaeb1bebe0->Object::Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90 }
+Next token is token 'a' (0xaaaaeb1bebe0 'a')
+Shifting token 'a' (0xaaaaeb1bebe0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea8f2be0 'a')
--> $$ = nterm item (0xaaaaea8f2be0 'a')
+   $1 = token 'a' (0xaaaaeb1bebe0 'a')
+-> $$ = nterm item (0xaaaaeb1bebe0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaaaea8f2c30->Object::Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0 }
-Next token is token 'a' (0xaaaaea8f2c30 'a')
-Shifting token 'a' (0xaaaaea8f2c30 'a')
+0xaaaaeb1bec30->Object::Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0 }
+Next token is token 'a' (0xaaaaeb1bec30 'a')
+Shifting token 'a' (0xaaaaeb1bec30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea8f2c30 'a')
--> $$ = nterm item (0xaaaaea8f2c30 'a')
+   $1 = token 'a' (0xaaaaeb1bec30 'a')
+-> $$ = nterm item (0xaaaaeb1bec30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaaaea8f2c80->Object::Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0, 0xaaaaea8f2c30 }
-Next token is token 'p' (0xaaaaea8f2c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaaea8f2c80->Object::~Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0, 0xaaaaea8f2c30, 0xaaaaea8f2c80 }
-0xaaaaea8f2c30->Object::~Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0, 0xaaaaea8f2c30 }
-0xaaaaea8f2be0->Object::~Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0 }
-0xaaaaea8f2b90->Object::~Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90 }
-0xaaaaea8f2b40->Object::~Object { 0xaaaaea8f2b40 }
+0xaaaaeb1bec80->Object::Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0, 0xaaaaeb1bec30 }
+Next token is token 'p' (0xaaaaeb1bec80 'p'Exception caught: cleaning lookahead and stack
+0xaaaaeb1bec80->Object::~Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0, 0xaaaaeb1bec30, 0xaaaaeb1bec80 }
+0xaaaaeb1bec30->Object::~Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0, 0xaaaaeb1bec30 }
+0xaaaaeb1bebe0->Object::~Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0 }
+0xaaaaeb1beb90->Object::~Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90 }
+0xaaaaeb1beb40->Object::~Object { 0xaaaaeb1beb40 }
 exception caught: printer
 end { }
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -253899,57 +253551,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaaea8f2b40->Object::Object { }
-Next token is token 'a' (0xaaaaea8f2b40 'a')
-Shifting token 'a' (0xaaaaea8f2b40 'a')
+0xaaaaeb1beb40->Object::Object { }
+Next token is token 'a' (0xaaaaeb1beb40 'a')
+Shifting token 'a' (0xaaaaeb1beb40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea8f2b40 'a')
--> $$ = nterm item (0xaaaaea8f2b40 'a')
+   $1 = token 'a' (0xaaaaeb1beb40 'a')
+-> $$ = nterm item (0xaaaaeb1beb40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaaaea8f2b90->Object::Object { 0xaaaaea8f2b40 }
-Next token is token 'a' (0xaaaaea8f2b90 'a')
-Shifting token 'a' (0xaaaaea8f2b90 'a')
+0xaaaaeb1beb90->Object::Object { 0xaaaaeb1beb40 }
+Next token is token 'a' (0xaaaaeb1beb90 'a')
+Shifting token 'a' (0xaaaaeb1beb90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea8f2b90 'a')
--> $$ = nterm item (0xaaaaea8f2b90 'a')
+   $1 = token 'a' (0xaaaaeb1beb90 'a')
+-> $$ = nterm item (0xaaaaeb1beb90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaaaea8f2be0->Object::Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90 }
-Next token is token 'a' (0xaaaaea8f2be0 'a')
-Shifting token 'a' (0xaaaaea8f2be0 'a')
+0xaaaaeb1bebe0->Object::Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90 }
+Next token is token 'a' (0xaaaaeb1bebe0 'a')
+Shifting token 'a' (0xaaaaeb1bebe0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea8f2be0 'a')
--> $$ = nterm item (0xaaaaea8f2be0 'a')
+   $1 = token 'a' (0xaaaaeb1bebe0 'a')
+-> $$ = nterm item (0xaaaaeb1bebe0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaaaea8f2c30->Object::Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0 }
-Next token is token 'a' (0xaaaaea8f2c30 'a')
-Shifting token 'a' (0xaaaaea8f2c30 'a')
+0xaaaaeb1bec30->Object::Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0 }
+Next token is token 'a' (0xaaaaeb1bec30 'a')
+Shifting token 'a' (0xaaaaeb1bec30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaaea8f2c30 'a')
--> $$ = nterm item (0xaaaaea8f2c30 'a')
+   $1 = token 'a' (0xaaaaeb1bec30 'a')
+-> $$ = nterm item (0xaaaaeb1bec30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaaaea8f2c80->Object::Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0, 0xaaaaea8f2c30 }
-Next token is token 'p' (0xaaaaea8f2c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaaea8f2c80->Object::~Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0, 0xaaaaea8f2c30, 0xaaaaea8f2c80 }
-0xaaaaea8f2c30->Object::~Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0, 0xaaaaea8f2c30 }
-0xaaaaea8f2be0->Object::~Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90, 0xaaaaea8f2be0 }
-0xaaaaea8f2b90->Object::~Object { 0xaaaaea8f2b40, 0xaaaaea8f2b90 }
-0xaaaaea8f2b40->Object::~Object { 0xaaaaea8f2b40 }
+0xaaaaeb1bec80->Object::Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0, 0xaaaaeb1bec30 }
+Next token is token 'p' (0xaaaaeb1bec80 'p'Exception caught: cleaning lookahead and stack
+0xaaaaeb1bec80->Object::~Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0, 0xaaaaeb1bec30, 0xaaaaeb1bec80 }
+0xaaaaeb1bec30->Object::~Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0, 0xaaaaeb1bec30 }
+0xaaaaeb1bebe0->Object::~Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90, 0xaaaaeb1bebe0 }
+0xaaaaeb1beb90->Object::~Object { 0xaaaaeb1beb40, 0xaaaaeb1beb90 }
+0xaaaaeb1beb40->Object::~Object { 0xaaaaeb1beb40 }
 exception caught: printer
 end { }
 ./c++.at:1361: grep '^exception caught: printer$' stderr
@@ -253959,30 +253611,56 @@
 stderr:
 exception caught: syntax error
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-stdout:
-./c++.at:1360:  $PREPARSER ./input aaaas
 ./c++.at:1361:  $PREPARSER ./input aaaaE
 stderr:
-stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-exception caught: reduction
-./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1360:  $PREPARSER ./input aaaal
 ./c++.at:1361:  $PREPARSER ./input aaaaT
 stderr:
-stderr:
-exception caught: yylex
-./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1361:  $PREPARSER ./input aaaaR
 stderr:
-./c++.at:1360:  $PREPARSER ./input i
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
+stdout:
+./c++.at:857:  $PREPARSER ./input
+stderr:
+./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid character
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+stderr:
+stdout:
+./c++.at:1360:  $PREPARSER ./input aaaas
+stderr:
+exception caught: reduction
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1360:  $PREPARSER ./input aaaal
+stderr:
+exception caught: yylex
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1360:  $PREPARSER ./input i
+stderr:
 exception caught: initial-action
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1360:  $PREPARSER ./input aaaap
@@ -253994,57 +253672,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaad72d1b40->Object::Object { }
-Next token is token 'a' (0xaaaad72d1b40 'a')
-Shifting token 'a' (0xaaaad72d1b40 'a')
+0xaaab0217ab40->Object::Object { }
+Next token is token 'a' (0xaaab0217ab40 'a')
+Shifting token 'a' (0xaaab0217ab40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad72d1b40 'a')
--> $$ = nterm item (0xaaaad72d1b40 'a')
+   $1 = token 'a' (0xaaab0217ab40 'a')
+-> $$ = nterm item (0xaaab0217ab40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaad72d1b90->Object::Object { 0xaaaad72d1b40 }
-Next token is token 'a' (0xaaaad72d1b90 'a')
-Shifting token 'a' (0xaaaad72d1b90 'a')
+0xaaab0217ab90->Object::Object { 0xaaab0217ab40 }
+Next token is token 'a' (0xaaab0217ab90 'a')
+Shifting token 'a' (0xaaab0217ab90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad72d1b90 'a')
--> $$ = nterm item (0xaaaad72d1b90 'a')
+   $1 = token 'a' (0xaaab0217ab90 'a')
+-> $$ = nterm item (0xaaab0217ab90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaad72d1be0->Object::Object { 0xaaaad72d1b40, 0xaaaad72d1b90 }
-Next token is token 'a' (0xaaaad72d1be0 'a')
-Shifting token 'a' (0xaaaad72d1be0 'a')
+0xaaab0217abe0->Object::Object { 0xaaab0217ab40, 0xaaab0217ab90 }
+Next token is token 'a' (0xaaab0217abe0 'a')
+Shifting token 'a' (0xaaab0217abe0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad72d1be0 'a')
--> $$ = nterm item (0xaaaad72d1be0 'a')
+   $1 = token 'a' (0xaaab0217abe0 'a')
+-> $$ = nterm item (0xaaab0217abe0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaad72d1c30->Object::Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0 }
-Next token is token 'a' (0xaaaad72d1c30 'a')
-Shifting token 'a' (0xaaaad72d1c30 'a')
+0xaaab0217ac30->Object::Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0 }
+Next token is token 'a' (0xaaab0217ac30 'a')
+Shifting token 'a' (0xaaab0217ac30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad72d1c30 'a')
--> $$ = nterm item (0xaaaad72d1c30 'a')
+   $1 = token 'a' (0xaaab0217ac30 'a')
+-> $$ = nterm item (0xaaab0217ac30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaad72d1c80->Object::Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0, 0xaaaad72d1c30 }
-Next token is token 'p' (0xaaaad72d1c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaad72d1c80->Object::~Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0, 0xaaaad72d1c30, 0xaaaad72d1c80 }
-0xaaaad72d1c30->Object::~Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0, 0xaaaad72d1c30 }
-0xaaaad72d1be0->Object::~Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0 }
-0xaaaad72d1b90->Object::~Object { 0xaaaad72d1b40, 0xaaaad72d1b90 }
-0xaaaad72d1b40->Object::~Object { 0xaaaad72d1b40 }
+0xaaab0217ac80->Object::Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0, 0xaaab0217ac30 }
+Next token is token 'p' (0xaaab0217ac80 'p'Exception caught: cleaning lookahead and stack
+0xaaab0217ac80->Object::~Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0, 0xaaab0217ac30, 0xaaab0217ac80 }
+0xaaab0217ac30->Object::~Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0, 0xaaab0217ac30 }
+0xaaab0217abe0->Object::~Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0 }
+0xaaab0217ab90->Object::~Object { 0xaaab0217ab40, 0xaaab0217ab90 }
+0xaaab0217ab40->Object::~Object { 0xaaab0217ab40 }
 exception caught: printer
 end { }
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -254053,57 +253731,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaad72d1b40->Object::Object { }
-Next token is token 'a' (0xaaaad72d1b40 'a')
-Shifting token 'a' (0xaaaad72d1b40 'a')
+0xaaab0217ab40->Object::Object { }
+Next token is token 'a' (0xaaab0217ab40 'a')
+Shifting token 'a' (0xaaab0217ab40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad72d1b40 'a')
--> $$ = nterm item (0xaaaad72d1b40 'a')
+   $1 = token 'a' (0xaaab0217ab40 'a')
+-> $$ = nterm item (0xaaab0217ab40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaad72d1b90->Object::Object { 0xaaaad72d1b40 }
-Next token is token 'a' (0xaaaad72d1b90 'a')
-Shifting token 'a' (0xaaaad72d1b90 'a')
+0xaaab0217ab90->Object::Object { 0xaaab0217ab40 }
+Next token is token 'a' (0xaaab0217ab90 'a')
+Shifting token 'a' (0xaaab0217ab90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad72d1b90 'a')
--> $$ = nterm item (0xaaaad72d1b90 'a')
+   $1 = token 'a' (0xaaab0217ab90 'a')
+-> $$ = nterm item (0xaaab0217ab90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaad72d1be0->Object::Object { 0xaaaad72d1b40, 0xaaaad72d1b90 }
-Next token is token 'a' (0xaaaad72d1be0 'a')
-Shifting token 'a' (0xaaaad72d1be0 'a')
+0xaaab0217abe0->Object::Object { 0xaaab0217ab40, 0xaaab0217ab90 }
+Next token is token 'a' (0xaaab0217abe0 'a')
+Shifting token 'a' (0xaaab0217abe0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad72d1be0 'a')
--> $$ = nterm item (0xaaaad72d1be0 'a')
+   $1 = token 'a' (0xaaab0217abe0 'a')
+-> $$ = nterm item (0xaaab0217abe0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaad72d1c30->Object::Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0 }
-Next token is token 'a' (0xaaaad72d1c30 'a')
-Shifting token 'a' (0xaaaad72d1c30 'a')
+0xaaab0217ac30->Object::Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0 }
+Next token is token 'a' (0xaaab0217ac30 'a')
+Shifting token 'a' (0xaaab0217ac30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad72d1c30 'a')
--> $$ = nterm item (0xaaaad72d1c30 'a')
+   $1 = token 'a' (0xaaab0217ac30 'a')
+-> $$ = nterm item (0xaaab0217ac30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaad72d1c80->Object::Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0, 0xaaaad72d1c30 }
-Next token is token 'p' (0xaaaad72d1c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaad72d1c80->Object::~Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0, 0xaaaad72d1c30, 0xaaaad72d1c80 }
-0xaaaad72d1c30->Object::~Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0, 0xaaaad72d1c30 }
-0xaaaad72d1be0->Object::~Object { 0xaaaad72d1b40, 0xaaaad72d1b90, 0xaaaad72d1be0 }
-0xaaaad72d1b90->Object::~Object { 0xaaaad72d1b40, 0xaaaad72d1b90 }
-0xaaaad72d1b40->Object::~Object { 0xaaaad72d1b40 }
+0xaaab0217ac80->Object::Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0, 0xaaab0217ac30 }
+Next token is token 'p' (0xaaab0217ac80 'p'Exception caught: cleaning lookahead and stack
+0xaaab0217ac80->Object::~Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0, 0xaaab0217ac30, 0xaaab0217ac80 }
+0xaaab0217ac30->Object::~Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0, 0xaaab0217ac30 }
+0xaaab0217abe0->Object::~Object { 0xaaab0217ab40, 0xaaab0217ab90, 0xaaab0217abe0 }
+0xaaab0217ab90->Object::~Object { 0xaaab0217ab40, 0xaaab0217ab90 }
+0xaaab0217ab40->Object::~Object { 0xaaab0217ab40 }
 exception caught: printer
 end { }
 ./c++.at:1360: grep '^exception caught: printer$' stderr
@@ -254127,39 +253805,6 @@
 ./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:857:  $PREPARSER ./input
-stderr:
-./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid character
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
-stderr:
-stdout:
-./c++.at:851:  $PREPARSER ./input
-stderr:
-./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./c++.at:1066:  $PREPARSER ./input < in
 stderr:
 error: invalid expression
@@ -254173,20 +253818,16 @@
 ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1066:  $PREPARSER ./input < in
 stderr:
-error: invalid character
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./c++.at:1066: ./check
-./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
+error: invalid character
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1362:  $PREPARSER ./input aaaas
+======== Testing with C++ standard flags: ''
 stderr:
 exception caught: reduction
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 ./c++.at:1362:  $PREPARSER ./input aaaal
 stderr:
 exception caught: yylex
@@ -254204,288 +253845,251 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xfffff468e700->Object::Object { }
-0xfffff468e7d0->Object::Object { 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'a' (0xfffff468e7d0 'a')
-0xfffff468e710->Object::Object { 0xfffff468e7d0 }
-0xfffff468e6c0->Object::Object { 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::~Object { 0xfffff468e6c0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xfffff468e710, 0xfffff468e7d0 }
-Shifting token 'a' (0xfffff468e710 'a')
-0xaaaaf87ceee0->Object::Object { 0xfffff468e710 }
-0xfffff468e698->Object::Object { 0xaaaaf87ceee0, 0xfffff468e710 }
-0xfffff468e698->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e698, 0xfffff468e710 }
-0xfffff468e710->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e710 }
+0xffffd50b7c60->Object::Object { }
+0xffffd50b7d30->Object::Object { 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'a' (0xffffd50b7d30 'a')
+0xffffd50b7c50->Object::Object { 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xffffd50b7c50, 0xffffd50b7d30 }
+Shifting token 'a' (0xffffd50b7c50 'a')
+0xaaaaf1bffee0->Object::Object { 0xffffd50b7c50 }
+0xffffd50b7c50->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7c50 }
 Entering state 2
 Stack now 0 2
-0xfffff468e7f0->Object::Object { 0xaaaaf87ceee0 }
+0xffffd50b7d50->Object::Object { 0xaaaaf1bffee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaf87ceee0 'a')
--> $$ = nterm item (0xfffff468e7f0 'a')
-0xaaaaf87ceee0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e7f0 }
-0xaaaaf87ceee0->Object::Object { 0xfffff468e7f0 }
-0xfffff468e7a0->Object::Object { 0xaaaaf87ceee0, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e7a0, 0xfffff468e7f0 }
-0xfffff468e7f0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e7f0 }
+   $1 = token 'a' (0xaaaaf1bffee0 'a')
+-> $$ = nterm item (0xffffd50b7d50 'a')
+0xaaaaf1bffee0->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7d50 }
+0xaaaaf1bffee0->Object::Object { 0xffffd50b7d50 }
+0xffffd50b7d50->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7d50 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xfffff468e700->Object::Object { 0xaaaaf87ceee0 }
-0xfffff468e7d0->Object::Object { 0xaaaaf87ceee0, 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'a' (0xfffff468e7d0 'a')
-0xfffff468e710->Object::Object { 0xaaaaf87ceee0, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::Object { 0xaaaaf87ceee0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e6c0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e710, 0xfffff468e7d0 }
-Shifting token 'a' (0xfffff468e710 'a')
-0xaaaaf87cef00->Object::Object { 0xaaaaf87ceee0, 0xfffff468e710 }
-0xfffff468e698->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710 }
-0xfffff468e698->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e698, 0xfffff468e710 }
-0xfffff468e710->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710 }
+0xffffd50b7c60->Object::Object { 0xaaaaf1bffee0 }
+0xffffd50b7d30->Object::Object { 0xaaaaf1bffee0, 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'a' (0xffffd50b7d30 'a')
+0xffffd50b7c50->Object::Object { 0xaaaaf1bffee0, 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7c50, 0xffffd50b7d30 }
+Shifting token 'a' (0xffffd50b7c50 'a')
+0xaaaaf1bfff00->Object::Object { 0xaaaaf1bffee0, 0xffffd50b7c50 }
+0xffffd50b7c50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c50 }
 Entering state 2
 Stack now 0 11 2
-0xfffff468e7f0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00 }
+0xffffd50b7d50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaf87cef00 'a')
--> $$ = nterm item (0xfffff468e7f0 'a')
-0xaaaaf87cef00->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7f0 }
-0xaaaaf87cef00->Object::Object { 0xaaaaf87ceee0, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7a0, 0xfffff468e7f0 }
-0xfffff468e7f0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7f0 }
+   $1 = token 'a' (0xaaaaf1bfff00 'a')
+-> $$ = nterm item (0xffffd50b7d50 'a')
+0xaaaaf1bfff00->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d50 }
+0xaaaaf1bfff00->Object::Object { 0xaaaaf1bffee0, 0xffffd50b7d50 }
+0xffffd50b7d50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d50 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xfffff468e700->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00 }
-0xfffff468e7d0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'a' (0xfffff468e7d0 'a')
-0xfffff468e710->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e6c0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710, 0xfffff468e7d0 }
-Shifting token 'a' (0xfffff468e710 'a')
-0xaaaaf87cef20->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710 }
-0xfffff468e698->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710 }
-0xfffff468e698->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e698, 0xfffff468e710 }
-0xfffff468e710->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710 }
+0xffffd50b7c60->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00 }
+0xffffd50b7d30->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'a' (0xffffd50b7d30 'a')
+0xffffd50b7c50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c50, 0xffffd50b7d30 }
+Shifting token 'a' (0xffffd50b7c50 'a')
+0xaaaaf1bfff20->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c50 }
+0xffffd50b7c50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c50 }
 Entering state 2
 Stack now 0 11 11 2
-0xfffff468e7f0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20 }
+0xffffd50b7d50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaf87cef20 'a')
--> $$ = nterm item (0xfffff468e7f0 'a')
-0xaaaaf87cef20->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7f0 }
-0xaaaaf87cef20->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7a0, 0xfffff468e7f0 }
-0xfffff468e7f0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7f0 }
+   $1 = token 'a' (0xaaaaf1bfff20 'a')
+-> $$ = nterm item (0xffffd50b7d50 'a')
+0xaaaaf1bfff20->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d50 }
+0xaaaaf1bfff20->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d50 }
+0xffffd50b7d50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d50 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xfffff468e700->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20 }
-0xfffff468e7d0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'a' (0xfffff468e7d0 'a')
-0xfffff468e710->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e6c0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710, 0xfffff468e7d0 }
-Shifting token 'a' (0xfffff468e710 'a')
-0xaaaaf87cef40->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710 }
-0xfffff468e698->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e710 }
-0xfffff468e698->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e698, 0xfffff468e710 }
-0xfffff468e710->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e710 }
+0xffffd50b7c60->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20 }
+0xffffd50b7d30->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'a' (0xffffd50b7d30 'a')
+0xffffd50b7c50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c50, 0xffffd50b7d30 }
+Shifting token 'a' (0xffffd50b7c50 'a')
+0xaaaaf1bfff40->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c50 }
+0xffffd50b7c50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7c50 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xfffff468e7f0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40 }
+0xffffd50b7d50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaf87cef40 'a')
--> $$ = nterm item (0xfffff468e7f0 'a')
-0xaaaaf87cef40->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7f0 }
-0xaaaaf87cef40->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7a0, 0xfffff468e7f0 }
-0xfffff468e7f0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7f0 }
+   $1 = token 'a' (0xaaaaf1bfff40 'a')
+-> $$ = nterm item (0xffffd50b7d50 'a')
+0xaaaaf1bfff40->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7d50 }
+0xaaaaf1bfff40->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d50 }
+0xffffd50b7d50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7d50 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xfffff468e700->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40 }
-0xfffff468e7d0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'p' (0xfffff468e7d0 'p'Exception caught: cleaning lookahead and stack
-0xaaaaf87cef40->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7d0 }
-0xaaaaf87cef20->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7d0 }
-0xaaaaf87cef00->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7d0 }
-0xaaaaf87ceee0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xfffff468e7d0 }
+0xffffd50b7c60->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40 }
+0xffffd50b7d30->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'p' (0xffffd50b7d30 'p'Exception caught: cleaning lookahead and stack
+0xaaaaf1bfff40->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7d30 }
+0xaaaaf1bfff20->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d30 }
+0xaaaaf1bfff00->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d30 }
+0xaaaaf1bffee0->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xffffd50b7d30 }
 exception caught: printer
 end { }
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xfffff468e700->Object::Object { }
-0xfffff468e7d0->Object::Object { 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'a' (0xfffff468e7d0 'a')
-0xfffff468e710->Object::Object { 0xfffff468e7d0 }
-0xfffff468e6c0->Object::Object { 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::~Object { 0xfffff468e6c0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xfffff468e710, 0xfffff468e7d0 }
-Shifting token 'a' (0xfffff468e710 'a')
-0xaaaaf87ceee0->Object::Object { 0xfffff468e710 }
-0xfffff468e698->Object::Object { 0xaaaaf87ceee0, 0xfffff468e710 }
-0xfffff468e698->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e698, 0xfffff468e710 }
-0xfffff468e710->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e710 }
+0xffffd50b7c60->Object::Object { }
+0xffffd50b7d30->Object::Object { 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'a' (0xffffd50b7d30 'a')
+0xffffd50b7c50->Object::Object { 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xffffd50b7c50, 0xffffd50b7d30 }
+Shifting token 'a' (0xffffd50b7c50 'a')
+0xaaaaf1bffee0->Object::Object { 0xffffd50b7c50 }
+0xffffd50b7c50->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7c50 }
 Entering state 2
 Stack now 0 2
-0xfffff468e7f0->Object::Object { 0xaaaaf87ceee0 }
+0xffffd50b7d50->Object::Object { 0xaaaaf1bffee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaf87ceee0 'a')
--> $$ = nterm item (0xfffff468e7f0 'a')
-0xaaaaf87ceee0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e7f0 }
-0xaaaaf87ceee0->Object::Object { 0xfffff468e7f0 }
-0xfffff468e7a0->Object::Object { 0xaaaaf87ceee0, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e7a0, 0xfffff468e7f0 }
-0xfffff468e7f0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e7f0 }
+   $1 = token 'a' (0xaaaaf1bffee0 'a')
+-> $$ = nterm item (0xffffd50b7d50 'a')
+0xaaaaf1bffee0->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7d50 }
+0xaaaaf1bffee0->Object::Object { 0xffffd50b7d50 }
+0xffffd50b7d50->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7d50 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xfffff468e700->Object::Object { 0xaaaaf87ceee0 }
-0xfffff468e7d0->Object::Object { 0xaaaaf87ceee0, 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'a' (0xfffff468e7d0 'a')
-0xfffff468e710->Object::Object { 0xaaaaf87ceee0, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::Object { 0xaaaaf87ceee0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e6c0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e710, 0xfffff468e7d0 }
-Shifting token 'a' (0xfffff468e710 'a')
-0xaaaaf87cef00->Object::Object { 0xaaaaf87ceee0, 0xfffff468e710 }
-0xfffff468e698->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710 }
-0xfffff468e698->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e698, 0xfffff468e710 }
-0xfffff468e710->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710 }
+0xffffd50b7c60->Object::Object { 0xaaaaf1bffee0 }
+0xffffd50b7d30->Object::Object { 0xaaaaf1bffee0, 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'a' (0xffffd50b7d30 'a')
+0xffffd50b7c50->Object::Object { 0xaaaaf1bffee0, 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7c50, 0xffffd50b7d30 }
+Shifting token 'a' (0xffffd50b7c50 'a')
+0xaaaaf1bfff00->Object::Object { 0xaaaaf1bffee0, 0xffffd50b7c50 }
+0xffffd50b7c50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c50 }
 Entering state 2
 Stack now 0 11 2
-0xfffff468e7f0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00 }
+0xffffd50b7d50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaf87cef00 'a')
--> $$ = nterm item (0xfffff468e7f0 'a')
-0xaaaaf87cef00->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7f0 }
-0xaaaaf87cef00->Object::Object { 0xaaaaf87ceee0, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7a0, 0xfffff468e7f0 }
-0xfffff468e7f0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7f0 }
+   $1 = token 'a' (0xaaaaf1bfff00 'a')
+-> $$ = nterm item (0xffffd50b7d50 'a')
+0xaaaaf1bfff00->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d50 }
+0xaaaaf1bfff00->Object::Object { 0xaaaaf1bffee0, 0xffffd50b7d50 }
+0xffffd50b7d50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d50 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xfffff468e700->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00 }
-0xfffff468e7d0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'a' (0xfffff468e7d0 'a')
-0xfffff468e710->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e6c0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710, 0xfffff468e7d0 }
-Shifting token 'a' (0xfffff468e710 'a')
-0xaaaaf87cef20->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e710 }
-0xfffff468e698->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710 }
-0xfffff468e698->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e698, 0xfffff468e710 }
-0xfffff468e710->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710 }
+0xffffd50b7c60->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00 }
+0xffffd50b7d30->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'a' (0xffffd50b7d30 'a')
+0xffffd50b7c50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c50, 0xffffd50b7d30 }
+Shifting token 'a' (0xffffd50b7c50 'a')
+0xaaaaf1bfff20->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7c50 }
+0xffffd50b7c50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c50 }
 Entering state 2
 Stack now 0 11 11 2
-0xfffff468e7f0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20 }
+0xffffd50b7d50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaf87cef20 'a')
--> $$ = nterm item (0xfffff468e7f0 'a')
-0xaaaaf87cef20->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7f0 }
-0xaaaaf87cef20->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7a0, 0xfffff468e7f0 }
-0xfffff468e7f0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7f0 }
+   $1 = token 'a' (0xaaaaf1bfff20 'a')
+-> $$ = nterm item (0xffffd50b7d50 'a')
+0xaaaaf1bfff20->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d50 }
+0xaaaaf1bfff20->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d50 }
+0xffffd50b7d50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d50 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xfffff468e700->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20 }
-0xfffff468e7d0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'a' (0xfffff468e7d0 'a')
-0xfffff468e710->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e6c0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e6c0, 0xfffff468e710, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710, 0xfffff468e7d0 }
-Shifting token 'a' (0xfffff468e710 'a')
-0xaaaaf87cef40->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e710 }
-0xfffff468e698->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e710 }
-0xfffff468e698->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e698, 0xfffff468e710 }
-0xfffff468e710->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e710 }
+0xffffd50b7c60->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20 }
+0xffffd50b7d30->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'a' (0xffffd50b7d30 'a')
+0xffffd50b7c50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c50, 0xffffd50b7d30 }
+Shifting token 'a' (0xffffd50b7c50 'a')
+0xaaaaf1bfff40->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7c50 }
+0xffffd50b7c50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7c50 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xfffff468e7f0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40 }
+0xffffd50b7d50->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaf87cef40 'a')
--> $$ = nterm item (0xfffff468e7f0 'a')
-0xaaaaf87cef40->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7f0 }
-0xaaaaf87cef40->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7f0 }
-0xfffff468e7a0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7a0, 0xfffff468e7f0 }
-0xfffff468e7f0->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7f0 }
+   $1 = token 'a' (0xaaaaf1bfff40 'a')
+-> $$ = nterm item (0xffffd50b7d50 'a')
+0xaaaaf1bfff40->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7d50 }
+0xaaaaf1bfff40->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d50 }
+0xffffd50b7d50->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7d50 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xfffff468e700->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40 }
-0xfffff468e7d0->Object::Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e700 }
-0xfffff468e700->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e700, 0xfffff468e7d0 }
-Next token is token 'p' (0xfffff468e7d0 'p'Exception caught: cleaning lookahead and stack
-0xaaaaf87cef40->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xaaaaf87cef40, 0xfffff468e7d0 }
-0xaaaaf87cef20->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xaaaaf87cef20, 0xfffff468e7d0 }
-0xaaaaf87cef00->Object::~Object { 0xaaaaf87ceee0, 0xaaaaf87cef00, 0xfffff468e7d0 }
-0xaaaaf87ceee0->Object::~Object { 0xaaaaf87ceee0, 0xfffff468e7d0 }
-0xfffff468e7d0->Object::~Object { 0xfffff468e7d0 }
+0xffffd50b7c60->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40 }
+0xffffd50b7d30->Object::Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7c60 }
+0xffffd50b7c60->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7c60, 0xffffd50b7d30 }
+Next token is token 'p' (0xffffd50b7d30 'p'Exception caught: cleaning lookahead and stack
+0xaaaaf1bfff40->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xaaaaf1bfff40, 0xffffd50b7d30 }
+0xaaaaf1bfff20->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xaaaaf1bfff20, 0xffffd50b7d30 }
+0xaaaaf1bfff00->Object::~Object { 0xaaaaf1bffee0, 0xaaaaf1bfff00, 0xffffd50b7d30 }
+0xaaaaf1bffee0->Object::~Object { 0xaaaaf1bffee0, 0xffffd50b7d30 }
+0xffffd50b7d30->Object::~Object { 0xffffd50b7d30 }
 exception caught: printer
 end { }
 ./c++.at:1362: grep '^exception caught: printer$' stderr
 stdout:
-./c++.at:1064:  $PREPARSER ./input < in
-stdout:
 exception caught: printer
 ./c++.at:1362:  $PREPARSER ./input aaaae
 stderr:
+exception caught: syntax error
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./c++.at:1362:  $PREPARSER ./input aaaaE
+stdout:
+stderr:
+./c++.at:1066: ./check
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+./c++.at:1362:  $PREPARSER ./input aaaaT
+stderr:
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaaR
+stderr:
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:851:  $PREPARSER ./input
+stderr:
+./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:1064:  $PREPARSER ./input < in
 stderr:
 error: invalid expression
 caught error
 error: invalid character
 caught error
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-exception caught: syntax error
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1064:  $PREPARSER ./input < in
 stderr:
 error: invalid expression
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaE
-stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1064:  $PREPARSER ./input < in
 stderr:
-./c++.at:1362:  $PREPARSER ./input aaaaT
 error: invalid character
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:1064: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
-./c++.at:1362:  $PREPARSER ./input aaaaR
-stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1361:  $PREPARSER ./input aaaas
@@ -254505,127 +254109,134 @@
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1361:  $PREPARSER ./input --debug aaaap
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab00157b40->Object::Object { }
-Next token is token 'a' (0xaaab00157b40 'a')
-Shifting token 'a' (0xaaab00157b40 'a')
+0xaaaafccf1b40->Object::Object { }
+Next token is token 'a' (0xaaaafccf1b40 'a')
+Shifting token 'a' (0xaaaafccf1b40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab00157b40 'a')
--> $$ = nterm item (0xaaab00157b40 'a')
+   $1 = token 'a' (0xaaaafccf1b40 'a')
+-> $$ = nterm item (0xaaaafccf1b40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaab00157b90->Object::Object { 0xaaab00157b40 }
-Next token is token 'a' (0xaaab00157b90 'a')
-Shifting token 'a' (0xaaab00157b90 'a')
+0xaaaafccf1b90->Object::Object { 0xaaaafccf1b40 }
+Next token is token 'a' (0xaaaafccf1b90 'a')
+Shifting token 'a' (0xaaaafccf1b90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab00157b90 'a')
--> $$ = nterm item (0xaaab00157b90 'a')
+   $1 = token 'a' (0xaaaafccf1b90 'a')
+-> $$ = nterm item (0xaaaafccf1b90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaab00157be0->Object::Object { 0xaaab00157b40, 0xaaab00157b90 }
-Next token is token 'a' (0xaaab00157be0 'a')
-Shifting token 'a' (0xaaab00157be0 'a')
+0xaaaafccf1be0->Object::Object { 0xaaaafccf1b40, 0xaaaafccf1b90 }
+Next token is token 'a' (0xaaaafccf1be0 'a')
+Shifting token 'a' (0xaaaafccf1be0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab00157be0 'a')
--> $$ = nterm item (0xaaab00157be0 'a')
+   $1 = token 'a' (0xaaaafccf1be0 'a')
+-> $$ = nterm item (0xaaaafccf1be0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaab00157c30->Object::Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0 }
-Next token is token 'a' (0xaaab00157c30 'a')
-Shifting token 'a' (0xaaab00157c30 'a')
+0xaaaafccf1c30->Object::Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0 }
+Next token is token 'a' (0xaaaafccf1c30 'a')
+Shifting token 'a' (0xaaaafccf1c30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab00157c30 'a')
--> $$ = nterm item (0xaaab00157c30 'a')
+   $1 = token 'a' (0xaaaafccf1c30 'a')
+-> $$ = nterm item (0xaaaafccf1c30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaab00157c80->Object::Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0, 0xaaab00157c30 }
-Next token is token 'p' (0xaaab00157c80 'p'Exception caught: cleaning lookahead and stack
-0xaaab00157c80->Object::~Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0, 0xaaab00157c30, 0xaaab00157c80 }
-0xaaab00157c30->Object::~Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0, 0xaaab00157c30 }
-0xaaab00157be0->Object::~Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0 }
-0xaaab00157b90->Object::~Object { 0xaaab00157b40, 0xaaab00157b90 }
-0xaaab00157b40->Object::~Object { 0xaaab00157b40 }
+0xaaaafccf1c80->Object::Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0, 0xaaaafccf1c30 }
+Next token is token 'p' (0xaaaafccf1c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaafccf1c80->Object::~Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0, 0xaaaafccf1c30, 0xaaaafccf1c80 }
+0xaaaafccf1c30->Object::~Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0, 0xaaaafccf1c30 }
+0xaaaafccf1be0->Object::~Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0 }
+0xaaaafccf1b90->Object::~Object { 0xaaaafccf1b40, 0xaaaafccf1b90 }
+0xaaaafccf1b40->Object::~Object { 0xaaaafccf1b40 }
 exception caught: printer
 end { }
+stdout:
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:857:  $PREPARSER ./input
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab00157b40->Object::Object { }
-Next token is token 'a' (0xaaab00157b40 'a')
-Shifting token 'a' (0xaaab00157b40 'a')
+0xaaaafccf1b40->Object::Object { }
+Next token is token 'a' (0xaaaafccf1b40 'a')
+Shifting token 'a' (0xaaaafccf1b40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab00157b40 'a')
--> $$ = nterm item (0xaaab00157b40 'a')
+   $1 = token 'a' (0xaaaafccf1b40 'a')
+-> $$ = nterm item (0xaaaafccf1b40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaab00157b90->Object::Object { 0xaaab00157b40 }
-Next token is token 'a' (0xaaab00157b90 'a')
-Shifting token 'a' (0xaaab00157b90 'a')
+0xaaaafccf1b90->Object::Object { 0xaaaafccf1b40 }
+Next token is token 'a' (0xaaaafccf1b90 'a')
+Shifting token 'a' (0xaaaafccf1b90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab00157b90 'a')
--> $$ = nterm item (0xaaab00157b90 'a')
+   $1 = token 'a' (0xaaaafccf1b90 'a')
+-> $$ = nterm item (0xaaaafccf1b90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaab00157be0->Object::Object { 0xaaab00157b40, 0xaaab00157b90 }
-Next token is token 'a' (0xaaab00157be0 'a')
-Shifting token 'a' (0xaaab00157be0 'a')
+0xaaaafccf1be0->Object::Object { 0xaaaafccf1b40, 0xaaaafccf1b90 }
+Next token is token 'a' (0xaaaafccf1be0 'a')
+Shifting token 'a' (0xaaaafccf1be0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab00157be0 'a')
--> $$ = nterm item (0xaaab00157be0 'a')
+   $1 = token 'a' (0xaaaafccf1be0 'a')
+-> $$ = nterm item (0xaaaafccf1be0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaab00157c30->Object::Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0 }
-Next token is token 'a' (0xaaab00157c30 'a')
-Shifting token 'a' (0xaaab00157c30 'a')
+0xaaaafccf1c30->Object::Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0 }
+Next token is token 'a' (0xaaaafccf1c30 'a')
+Shifting token 'a' (0xaaaafccf1c30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab00157c30 'a')
--> $$ = nterm item (0xaaab00157c30 'a')
+   $1 = token 'a' (0xaaaafccf1c30 'a')
+-> $$ = nterm item (0xaaaafccf1c30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaab00157c80->Object::Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0, 0xaaab00157c30 }
-Next token is token 'p' (0xaaab00157c80 'p'Exception caught: cleaning lookahead and stack
-0xaaab00157c80->Object::~Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0, 0xaaab00157c30, 0xaaab00157c80 }
-0xaaab00157c30->Object::~Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0, 0xaaab00157c30 }
-0xaaab00157be0->Object::~Object { 0xaaab00157b40, 0xaaab00157b90, 0xaaab00157be0 }
-0xaaab00157b90->Object::~Object { 0xaaab00157b40, 0xaaab00157b90 }
-0xaaab00157b40->Object::~Object { 0xaaab00157b40 }
+0xaaaafccf1c80->Object::Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0, 0xaaaafccf1c30 }
+Next token is token 'p' (0xaaaafccf1c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaafccf1c80->Object::~Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0, 0xaaaafccf1c30, 0xaaaafccf1c80 }
+0xaaaafccf1c30->Object::~Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0, 0xaaaafccf1c30 }
+0xaaaafccf1be0->Object::~Object { 0xaaaafccf1b40, 0xaaaafccf1b90, 0xaaaafccf1be0 }
+0xaaaafccf1b90->Object::~Object { 0xaaaafccf1b40, 0xaaaafccf1b90 }
+0xaaaafccf1b40->Object::~Object { 0xaaaafccf1b40 }
 exception caught: printer
 end { }
 ./c++.at:1361: grep '^exception caught: printer$' stderr
 stdout:
+stderr:
 exception caught: printer
 ./c++.at:1361:  $PREPARSER ./input aaaae
+./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+======== Testing with C++ standard flags: ''
+./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 exception caught: syntax error
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1361:  $PREPARSER ./input aaaaE
@@ -254642,11 +254253,32 @@
 ./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:857:  $PREPARSER ./input
+./c++.at:851:  $PREPARSER ./input
 stderr:
-./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+678. c++.at:848:  ok
+
+stderr:
+stdout:
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ...
+./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid character
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:857: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1360:  $PREPARSER ./input aaaas
@@ -254670,57 +254302,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaac7d7eb40->Object::Object { }
-Next token is token 'a' (0xaaaac7d7eb40 'a')
-Shifting token 'a' (0xaaaac7d7eb40 'a')
+0xaaab045c7b40->Object::Object { }
+Next token is token 'a' (0xaaab045c7b40 'a')
+Shifting token 'a' (0xaaab045c7b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaac7d7eb40 'a')
--> $$ = nterm item (0xaaaac7d7eb40 'a')
+   $1 = token 'a' (0xaaab045c7b40 'a')
+-> $$ = nterm item (0xaaab045c7b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaac7d7eb90->Object::Object { 0xaaaac7d7eb40 }
-Next token is token 'a' (0xaaaac7d7eb90 'a')
-Shifting token 'a' (0xaaaac7d7eb90 'a')
+0xaaab045c7b90->Object::Object { 0xaaab045c7b40 }
+Next token is token 'a' (0xaaab045c7b90 'a')
+Shifting token 'a' (0xaaab045c7b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaac7d7eb90 'a')
--> $$ = nterm item (0xaaaac7d7eb90 'a')
+   $1 = token 'a' (0xaaab045c7b90 'a')
+-> $$ = nterm item (0xaaab045c7b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaac7d7ebe0->Object::Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90 }
-Next token is token 'a' (0xaaaac7d7ebe0 'a')
-Shifting token 'a' (0xaaaac7d7ebe0 'a')
+0xaaab045c7be0->Object::Object { 0xaaab045c7b40, 0xaaab045c7b90 }
+Next token is token 'a' (0xaaab045c7be0 'a')
+Shifting token 'a' (0xaaab045c7be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaac7d7ebe0 'a')
--> $$ = nterm item (0xaaaac7d7ebe0 'a')
+   $1 = token 'a' (0xaaab045c7be0 'a')
+-> $$ = nterm item (0xaaab045c7be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaac7d7ec30->Object::Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0 }
-Next token is token 'a' (0xaaaac7d7ec30 'a')
-Shifting token 'a' (0xaaaac7d7ec30 'a')
+0xaaab045c7c30->Object::Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0 }
+Next token is token 'a' (0xaaab045c7c30 'a')
+Shifting token 'a' (0xaaab045c7c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaac7d7ec30 'a')
--> $$ = nterm item (0xaaaac7d7ec30 'a')
+   $1 = token 'a' (0xaaab045c7c30 'a')
+-> $$ = nterm item (0xaaab045c7c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaac7d7ec80->Object::Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0, 0xaaaac7d7ec30 }
-Next token is token 'p' (0xaaaac7d7ec80 'p'Exception caught: cleaning lookahead and stack
-0xaaaac7d7ec80->Object::~Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0, 0xaaaac7d7ec30, 0xaaaac7d7ec80 }
-0xaaaac7d7ec30->Object::~Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0, 0xaaaac7d7ec30 }
-0xaaaac7d7ebe0->Object::~Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0 }
-0xaaaac7d7eb90->Object::~Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90 }
-0xaaaac7d7eb40->Object::~Object { 0xaaaac7d7eb40 }
+0xaaab045c7c80->Object::Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0, 0xaaab045c7c30 }
+Next token is token 'p' (0xaaab045c7c80 'p'Exception caught: cleaning lookahead and stack
+0xaaab045c7c80->Object::~Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0, 0xaaab045c7c30, 0xaaab045c7c80 }
+0xaaab045c7c30->Object::~Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0, 0xaaab045c7c30 }
+0xaaab045c7be0->Object::~Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0 }
+0xaaab045c7b90->Object::~Object { 0xaaab045c7b40, 0xaaab045c7b90 }
+0xaaab045c7b40->Object::~Object { 0xaaab045c7b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -254729,57 +254361,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaac7d7eb40->Object::Object { }
-Next token is token 'a' (0xaaaac7d7eb40 'a')
-Shifting token 'a' (0xaaaac7d7eb40 'a')
+0xaaab045c7b40->Object::Object { }
+Next token is token 'a' (0xaaab045c7b40 'a')
+Shifting token 'a' (0xaaab045c7b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaac7d7eb40 'a')
--> $$ = nterm item (0xaaaac7d7eb40 'a')
+   $1 = token 'a' (0xaaab045c7b40 'a')
+-> $$ = nterm item (0xaaab045c7b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaac7d7eb90->Object::Object { 0xaaaac7d7eb40 }
-Next token is token 'a' (0xaaaac7d7eb90 'a')
-Shifting token 'a' (0xaaaac7d7eb90 'a')
+0xaaab045c7b90->Object::Object { 0xaaab045c7b40 }
+Next token is token 'a' (0xaaab045c7b90 'a')
+Shifting token 'a' (0xaaab045c7b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaac7d7eb90 'a')
--> $$ = nterm item (0xaaaac7d7eb90 'a')
+   $1 = token 'a' (0xaaab045c7b90 'a')
+-> $$ = nterm item (0xaaab045c7b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaac7d7ebe0->Object::Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90 }
-Next token is token 'a' (0xaaaac7d7ebe0 'a')
-Shifting token 'a' (0xaaaac7d7ebe0 'a')
+0xaaab045c7be0->Object::Object { 0xaaab045c7b40, 0xaaab045c7b90 }
+Next token is token 'a' (0xaaab045c7be0 'a')
+Shifting token 'a' (0xaaab045c7be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaac7d7ebe0 'a')
--> $$ = nterm item (0xaaaac7d7ebe0 'a')
+   $1 = token 'a' (0xaaab045c7be0 'a')
+-> $$ = nterm item (0xaaab045c7be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaac7d7ec30->Object::Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0 }
-Next token is token 'a' (0xaaaac7d7ec30 'a')
-Shifting token 'a' (0xaaaac7d7ec30 'a')
+0xaaab045c7c30->Object::Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0 }
+Next token is token 'a' (0xaaab045c7c30 'a')
+Shifting token 'a' (0xaaab045c7c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaac7d7ec30 'a')
--> $$ = nterm item (0xaaaac7d7ec30 'a')
+   $1 = token 'a' (0xaaab045c7c30 'a')
+-> $$ = nterm item (0xaaab045c7c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaac7d7ec80->Object::Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0, 0xaaaac7d7ec30 }
-Next token is token 'p' (0xaaaac7d7ec80 'p'Exception caught: cleaning lookahead and stack
-0xaaaac7d7ec80->Object::~Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0, 0xaaaac7d7ec30, 0xaaaac7d7ec80 }
-0xaaaac7d7ec30->Object::~Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0, 0xaaaac7d7ec30 }
-0xaaaac7d7ebe0->Object::~Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90, 0xaaaac7d7ebe0 }
-0xaaaac7d7eb90->Object::~Object { 0xaaaac7d7eb40, 0xaaaac7d7eb90 }
-0xaaaac7d7eb40->Object::~Object { 0xaaaac7d7eb40 }
+0xaaab045c7c80->Object::Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0, 0xaaab045c7c30 }
+Next token is token 'p' (0xaaab045c7c80 'p'Exception caught: cleaning lookahead and stack
+0xaaab045c7c80->Object::~Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0, 0xaaab045c7c30, 0xaaab045c7c80 }
+0xaaab045c7c30->Object::~Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0, 0xaaab045c7c30 }
+0xaaab045c7be0->Object::~Object { 0xaaab045c7b40, 0xaaab045c7b90, 0xaaab045c7be0 }
+0xaaab045c7b90->Object::~Object { 0xaaab045c7b40, 0xaaab045c7b90 }
+0xaaab045c7b40->Object::~Object { 0xaaab045c7b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: grep '^exception caught: printer$' stderr
@@ -254789,64 +254421,23 @@
 stderr:
 exception caught: syntax error
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./c++.at:1360:  $PREPARSER ./input aaaaE
-stdout:
-./c++.at:851:  $PREPARSER ./input
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 ./c++.at:1360:  $PREPARSER ./input aaaaT
-./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:851: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:1360:  $PREPARSER ./input aaaaR
 stderr:
-./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid character
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1065: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
-stderr:
-stdout:
-./c++.at:857:  $PREPARSER ./input
-stderr:
-./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
-======== Testing with C++ standard flags: ''
-./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
 stdout:
-./c++.at:851:  $PREPARSER ./input
-stderr:
-./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-678. c++.at:848:  ok
 stderr:
-
-stdout:
 ./c++.at:1362:  $PREPARSER ./input aaaas
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
 stderr:
 exception caught: reduction
+./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1362:  $PREPARSER ./input aaaal
 stderr:
@@ -254856,8 +254447,6 @@
 stderr:
 exception caught: initial-action
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ...
-./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS
 ./c++.at:1362:  $PREPARSER ./input aaaap
 stderr:
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -254867,123 +254456,123 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffcceb1dd0->Object::Object { }
-0xffffcceb1ea0->Object::Object { 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'a' (0xffffcceb1ea0 'a')
-0xffffcceb1de0->Object::Object { 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::Object { 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::~Object { 0xffffcceb1d90, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xffffcceb1de0, 0xffffcceb1ea0 }
-Shifting token 'a' (0xffffcceb1de0 'a')
-0xaaaac5fb5ee0->Object::Object { 0xffffcceb1de0 }
-0xffffcceb1d68->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1d68, 0xffffcceb1de0 }
-0xffffcceb1de0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1de0 }
+0xffffdebb66d0->Object::Object { }
+0xffffdebb67a0->Object::Object { 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'a' (0xffffdebb67a0 'a')
+0xffffdebb66e0->Object::Object { 0xffffdebb67a0 }
+0xffffdebb6690->Object::Object { 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::~Object { 0xffffdebb6690, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xffffdebb66e0, 0xffffdebb67a0 }
+Shifting token 'a' (0xffffdebb66e0 'a')
+0xaaaae0316ee0->Object::Object { 0xffffdebb66e0 }
+0xffffdebb6668->Object::Object { 0xaaaae0316ee0, 0xffffdebb66e0 }
+0xffffdebb6668->Object::~Object { 0xaaaae0316ee0, 0xffffdebb6668, 0xffffdebb66e0 }
+0xffffdebb66e0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb66e0 }
 Entering state 2
 Stack now 0 2
-0xffffcceb1ec0->Object::Object { 0xaaaac5fb5ee0 }
+0xffffdebb67c0->Object::Object { 0xaaaae0316ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac5fb5ee0 'a')
--> $$ = nterm item (0xffffcceb1ec0 'a')
-0xaaaac5fb5ee0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1ec0 }
-0xaaaac5fb5ee0->Object::Object { 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1e70, 0xffffcceb1ec0 }
-0xffffcceb1ec0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1ec0 }
+   $1 = token 'a' (0xaaaae0316ee0 'a')
+-> $$ = nterm item (0xffffdebb67c0 'a')
+0xaaaae0316ee0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb67c0 }
+0xaaaae0316ee0->Object::Object { 0xffffdebb67c0 }
+0xffffdebb6770->Object::Object { 0xaaaae0316ee0, 0xffffdebb67c0 }
+0xffffdebb6770->Object::~Object { 0xaaaae0316ee0, 0xffffdebb6770, 0xffffdebb67c0 }
+0xffffdebb67c0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb67c0 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xffffcceb1dd0->Object::Object { 0xaaaac5fb5ee0 }
-0xffffcceb1ea0->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'a' (0xffffcceb1ea0 'a')
-0xffffcceb1de0->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1d90, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1de0, 0xffffcceb1ea0 }
-Shifting token 'a' (0xffffcceb1de0 'a')
-0xaaaac5fb5f00->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1d68, 0xffffcceb1de0 }
-0xffffcceb1de0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0 }
+0xffffdebb66d0->Object::Object { 0xaaaae0316ee0 }
+0xffffdebb67a0->Object::Object { 0xaaaae0316ee0, 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'a' (0xffffdebb67a0 'a')
+0xffffdebb66e0->Object::Object { 0xaaaae0316ee0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::Object { 0xaaaae0316ee0, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::~Object { 0xaaaae0316ee0, 0xffffdebb6690, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb66e0, 0xffffdebb67a0 }
+Shifting token 'a' (0xffffdebb66e0 'a')
+0xaaaae0316f00->Object::Object { 0xaaaae0316ee0, 0xffffdebb66e0 }
+0xffffdebb6668->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0 }
+0xffffdebb6668->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb6668, 0xffffdebb66e0 }
+0xffffdebb66e0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0 }
 Entering state 2
 Stack now 0 11 2
-0xffffcceb1ec0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00 }
+0xffffdebb67c0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac5fb5f00 'a')
--> $$ = nterm item (0xffffcceb1ec0 'a')
-0xaaaac5fb5f00->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ec0 }
-0xaaaac5fb5f00->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1e70, 0xffffcceb1ec0 }
-0xffffcceb1ec0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ec0 }
+   $1 = token 'a' (0xaaaae0316f00 'a')
+-> $$ = nterm item (0xffffdebb67c0 'a')
+0xaaaae0316f00->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67c0 }
+0xaaaae0316f00->Object::Object { 0xaaaae0316ee0, 0xffffdebb67c0 }
+0xffffdebb6770->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67c0 }
+0xffffdebb6770->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb6770, 0xffffdebb67c0 }
+0xffffdebb67c0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67c0 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xffffcceb1dd0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00 }
-0xffffcceb1ea0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'a' (0xffffcceb1ea0 'a')
-0xffffcceb1de0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1d90, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0, 0xffffcceb1ea0 }
-Shifting token 'a' (0xffffcceb1de0 'a')
-0xaaaac5fb5f20->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1d68, 0xffffcceb1de0 }
-0xffffcceb1de0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0 }
+0xffffdebb66d0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00 }
+0xffffdebb67a0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'a' (0xffffdebb67a0 'a')
+0xffffdebb66e0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67a0 }
+0xffffdebb6690->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb6690, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0, 0xffffdebb67a0 }
+Shifting token 'a' (0xffffdebb66e0 'a')
+0xaaaae0316f20->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0 }
+0xffffdebb6668->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0 }
+0xffffdebb6668->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb6668, 0xffffdebb66e0 }
+0xffffdebb66e0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0 }
 Entering state 2
 Stack now 0 11 11 2
-0xffffcceb1ec0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20 }
+0xffffdebb67c0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac5fb5f20 'a')
--> $$ = nterm item (0xffffcceb1ec0 'a')
-0xaaaac5fb5f20->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ec0 }
-0xaaaac5fb5f20->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1e70, 0xffffcceb1ec0 }
-0xffffcceb1ec0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ec0 }
+   $1 = token 'a' (0xaaaae0316f20 'a')
+-> $$ = nterm item (0xffffdebb67c0 'a')
+0xaaaae0316f20->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67c0 }
+0xaaaae0316f20->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67c0 }
+0xffffdebb6770->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67c0 }
+0xffffdebb6770->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb6770, 0xffffdebb67c0 }
+0xffffdebb67c0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67c0 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xffffcceb1dd0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20 }
-0xffffcceb1ea0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'a' (0xffffcceb1ea0 'a')
-0xffffcceb1de0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1d90, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0, 0xffffcceb1ea0 }
-Shifting token 'a' (0xffffcceb1de0 'a')
-0xaaaac5fb5f40->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1d68, 0xffffcceb1de0 }
-0xffffcceb1de0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1de0 }
+0xffffdebb66d0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20 }
+0xffffdebb67a0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'a' (0xffffdebb67a0 'a')
+0xffffdebb66e0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67a0 }
+0xffffdebb6690->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb6690, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0, 0xffffdebb67a0 }
+Shifting token 'a' (0xffffdebb66e0 'a')
+0xaaaae0316f40->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0 }
+0xffffdebb6668->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb66e0 }
+0xffffdebb6668->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb6668, 0xffffdebb66e0 }
+0xffffdebb66e0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb66e0 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xffffcceb1ec0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40 }
+0xffffdebb67c0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac5fb5f40 'a')
--> $$ = nterm item (0xffffcceb1ec0 'a')
-0xaaaac5fb5f40->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1ec0 }
-0xaaaac5fb5f40->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1e70, 0xffffcceb1ec0 }
-0xffffcceb1ec0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1ec0 }
+   $1 = token 'a' (0xaaaae0316f40 'a')
+-> $$ = nterm item (0xffffdebb67c0 'a')
+0xaaaae0316f40->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb67c0 }
+0xaaaae0316f40->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67c0 }
+0xffffdebb6770->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb67c0 }
+0xffffdebb6770->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb6770, 0xffffdebb67c0 }
+0xffffdebb67c0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb67c0 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xffffcceb1dd0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40 }
-0xffffcceb1ea0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'p' (0xffffcceb1ea0 'p'Exception caught: cleaning lookahead and stack
-0xaaaac5fb5f40->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1ea0 }
-0xaaaac5fb5f20->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ea0 }
-0xaaaac5fb5f00->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ea0 }
-0xaaaac5fb5ee0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xffffcceb1ea0 }
+0xffffdebb66d0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40 }
+0xffffdebb67a0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'p' (0xffffdebb67a0 'p'Exception caught: cleaning lookahead and stack
+0xaaaae0316f40->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb67a0 }
+0xaaaae0316f20->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67a0 }
+0xaaaae0316f00->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67a0 }
+0xaaaae0316ee0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xffffdebb67a0 }
 exception caught: printer
 end { }
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -254992,123 +254581,123 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffcceb1dd0->Object::Object { }
-0xffffcceb1ea0->Object::Object { 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'a' (0xffffcceb1ea0 'a')
-0xffffcceb1de0->Object::Object { 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::Object { 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::~Object { 0xffffcceb1d90, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xffffcceb1de0, 0xffffcceb1ea0 }
-Shifting token 'a' (0xffffcceb1de0 'a')
-0xaaaac5fb5ee0->Object::Object { 0xffffcceb1de0 }
-0xffffcceb1d68->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1d68, 0xffffcceb1de0 }
-0xffffcceb1de0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1de0 }
+0xffffdebb66d0->Object::Object { }
+0xffffdebb67a0->Object::Object { 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'a' (0xffffdebb67a0 'a')
+0xffffdebb66e0->Object::Object { 0xffffdebb67a0 }
+0xffffdebb6690->Object::Object { 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::~Object { 0xffffdebb6690, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xffffdebb66e0, 0xffffdebb67a0 }
+Shifting token 'a' (0xffffdebb66e0 'a')
+0xaaaae0316ee0->Object::Object { 0xffffdebb66e0 }
+0xffffdebb6668->Object::Object { 0xaaaae0316ee0, 0xffffdebb66e0 }
+0xffffdebb6668->Object::~Object { 0xaaaae0316ee0, 0xffffdebb6668, 0xffffdebb66e0 }
+0xffffdebb66e0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb66e0 }
 Entering state 2
 Stack now 0 2
-0xffffcceb1ec0->Object::Object { 0xaaaac5fb5ee0 }
+0xffffdebb67c0->Object::Object { 0xaaaae0316ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac5fb5ee0 'a')
--> $$ = nterm item (0xffffcceb1ec0 'a')
-0xaaaac5fb5ee0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1ec0 }
-0xaaaac5fb5ee0->Object::Object { 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1e70, 0xffffcceb1ec0 }
-0xffffcceb1ec0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1ec0 }
+   $1 = token 'a' (0xaaaae0316ee0 'a')
+-> $$ = nterm item (0xffffdebb67c0 'a')
+0xaaaae0316ee0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb67c0 }
+0xaaaae0316ee0->Object::Object { 0xffffdebb67c0 }
+0xffffdebb6770->Object::Object { 0xaaaae0316ee0, 0xffffdebb67c0 }
+0xffffdebb6770->Object::~Object { 0xaaaae0316ee0, 0xffffdebb6770, 0xffffdebb67c0 }
+0xffffdebb67c0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb67c0 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xffffcceb1dd0->Object::Object { 0xaaaac5fb5ee0 }
-0xffffcceb1ea0->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'a' (0xffffcceb1ea0 'a')
-0xffffcceb1de0->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1d90, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1de0, 0xffffcceb1ea0 }
-Shifting token 'a' (0xffffcceb1de0 'a')
-0xaaaac5fb5f00->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1d68, 0xffffcceb1de0 }
-0xffffcceb1de0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0 }
+0xffffdebb66d0->Object::Object { 0xaaaae0316ee0 }
+0xffffdebb67a0->Object::Object { 0xaaaae0316ee0, 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'a' (0xffffdebb67a0 'a')
+0xffffdebb66e0->Object::Object { 0xaaaae0316ee0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::Object { 0xaaaae0316ee0, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::~Object { 0xaaaae0316ee0, 0xffffdebb6690, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb66e0, 0xffffdebb67a0 }
+Shifting token 'a' (0xffffdebb66e0 'a')
+0xaaaae0316f00->Object::Object { 0xaaaae0316ee0, 0xffffdebb66e0 }
+0xffffdebb6668->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0 }
+0xffffdebb6668->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb6668, 0xffffdebb66e0 }
+0xffffdebb66e0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0 }
 Entering state 2
 Stack now 0 11 2
-0xffffcceb1ec0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00 }
+0xffffdebb67c0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac5fb5f00 'a')
--> $$ = nterm item (0xffffcceb1ec0 'a')
-0xaaaac5fb5f00->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ec0 }
-0xaaaac5fb5f00->Object::Object { 0xaaaac5fb5ee0, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1e70, 0xffffcceb1ec0 }
-0xffffcceb1ec0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ec0 }
+   $1 = token 'a' (0xaaaae0316f00 'a')
+-> $$ = nterm item (0xffffdebb67c0 'a')
+0xaaaae0316f00->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67c0 }
+0xaaaae0316f00->Object::Object { 0xaaaae0316ee0, 0xffffdebb67c0 }
+0xffffdebb6770->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67c0 }
+0xffffdebb6770->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb6770, 0xffffdebb67c0 }
+0xffffdebb67c0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67c0 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xffffcceb1dd0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00 }
-0xffffcceb1ea0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'a' (0xffffcceb1ea0 'a')
-0xffffcceb1de0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1d90, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0, 0xffffcceb1ea0 }
-Shifting token 'a' (0xffffcceb1de0 'a')
-0xaaaac5fb5f20->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1d68, 0xffffcceb1de0 }
-0xffffcceb1de0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0 }
+0xffffdebb66d0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00 }
+0xffffdebb67a0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'a' (0xffffdebb67a0 'a')
+0xffffdebb66e0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67a0 }
+0xffffdebb6690->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb6690, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0, 0xffffdebb67a0 }
+Shifting token 'a' (0xffffdebb66e0 'a')
+0xaaaae0316f20->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb66e0 }
+0xffffdebb6668->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0 }
+0xffffdebb6668->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb6668, 0xffffdebb66e0 }
+0xffffdebb66e0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0 }
 Entering state 2
 Stack now 0 11 11 2
-0xffffcceb1ec0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20 }
+0xffffdebb67c0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac5fb5f20 'a')
--> $$ = nterm item (0xffffcceb1ec0 'a')
-0xaaaac5fb5f20->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ec0 }
-0xaaaac5fb5f20->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1e70, 0xffffcceb1ec0 }
-0xffffcceb1ec0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ec0 }
+   $1 = token 'a' (0xaaaae0316f20 'a')
+-> $$ = nterm item (0xffffdebb67c0 'a')
+0xaaaae0316f20->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67c0 }
+0xaaaae0316f20->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67c0 }
+0xffffdebb6770->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67c0 }
+0xffffdebb6770->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb6770, 0xffffdebb67c0 }
+0xffffdebb67c0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67c0 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xffffcceb1dd0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20 }
-0xffffcceb1ea0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'a' (0xffffcceb1ea0 'a')
-0xffffcceb1de0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1d90->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1d90, 0xffffcceb1de0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0, 0xffffcceb1ea0 }
-Shifting token 'a' (0xffffcceb1de0 'a')
-0xaaaac5fb5f40->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1de0 }
-0xffffcceb1d68->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1d68, 0xffffcceb1de0 }
-0xffffcceb1de0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1de0 }
+0xffffdebb66d0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20 }
+0xffffdebb67a0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'a' (0xffffdebb67a0 'a')
+0xffffdebb66e0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67a0 }
+0xffffdebb6690->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb6690->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb6690, 0xffffdebb66e0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0, 0xffffdebb67a0 }
+Shifting token 'a' (0xffffdebb66e0 'a')
+0xaaaae0316f40->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb66e0 }
+0xffffdebb6668->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb66e0 }
+0xffffdebb6668->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb6668, 0xffffdebb66e0 }
+0xffffdebb66e0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb66e0 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xffffcceb1ec0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40 }
+0xffffdebb67c0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac5fb5f40 'a')
--> $$ = nterm item (0xffffcceb1ec0 'a')
-0xaaaac5fb5f40->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1ec0 }
-0xaaaac5fb5f40->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1ec0 }
-0xffffcceb1e70->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1e70, 0xffffcceb1ec0 }
-0xffffcceb1ec0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1ec0 }
+   $1 = token 'a' (0xaaaae0316f40 'a')
+-> $$ = nterm item (0xffffdebb67c0 'a')
+0xaaaae0316f40->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb67c0 }
+0xaaaae0316f40->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67c0 }
+0xffffdebb6770->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb67c0 }
+0xffffdebb6770->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb6770, 0xffffdebb67c0 }
+0xffffdebb67c0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb67c0 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xffffcceb1dd0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40 }
-0xffffcceb1ea0->Object::Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1dd0 }
-0xffffcceb1dd0->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1dd0, 0xffffcceb1ea0 }
-Next token is token 'p' (0xffffcceb1ea0 'p'Exception caught: cleaning lookahead and stack
-0xaaaac5fb5f40->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xaaaac5fb5f40, 0xffffcceb1ea0 }
-0xaaaac5fb5f20->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xaaaac5fb5f20, 0xffffcceb1ea0 }
-0xaaaac5fb5f00->Object::~Object { 0xaaaac5fb5ee0, 0xaaaac5fb5f00, 0xffffcceb1ea0 }
-0xaaaac5fb5ee0->Object::~Object { 0xaaaac5fb5ee0, 0xffffcceb1ea0 }
-0xffffcceb1ea0->Object::~Object { 0xffffcceb1ea0 }
+0xffffdebb66d0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40 }
+0xffffdebb67a0->Object::Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb66d0 }
+0xffffdebb66d0->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb66d0, 0xffffdebb67a0 }
+Next token is token 'p' (0xffffdebb67a0 'p'Exception caught: cleaning lookahead and stack
+0xaaaae0316f40->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xaaaae0316f40, 0xffffdebb67a0 }
+0xaaaae0316f20->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xaaaae0316f20, 0xffffdebb67a0 }
+0xaaaae0316f00->Object::~Object { 0xaaaae0316ee0, 0xaaaae0316f00, 0xffffdebb67a0 }
+0xaaaae0316ee0->Object::~Object { 0xaaaae0316ee0, 0xffffdebb67a0 }
+0xffffdebb67a0->Object::~Object { 0xffffdebb67a0 }
 exception caught: printer
 end { }
 ./c++.at:1362: grep '^exception caught: printer$' stderr
@@ -255116,9 +254705,15 @@
 exception caught: printer
 ./c++.at:1362:  $PREPARSER ./input aaaae
 stderr:
+stdout:
+./c++.at:857:  $PREPARSER ./input
+stderr:
+stderr:
 exception caught: syntax error
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1362:  $PREPARSER ./input aaaaE
+./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -255130,6 +254725,17 @@
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+======== Testing with C++ standard flags: ''
+./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:1363: ./exceptions || exit 77
+stderr:
+Inner caught
+Outer caught
+./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc --report=all input.yy
+======== Testing with C++ standard flags: ''
+./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1064:  $PREPARSER ./input < in
@@ -255148,15 +254754,13 @@
 error: invalid character
 ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 682. c++.at:1064:  ok
-
 stderr:
 stdout:
 ./c++.at:1361:  $PREPARSER ./input aaaas
 stderr:
-689. c++.at:1371: testing C++ GLR parser identifier shadowing ...
 exception caught: reduction
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+
 ./c++.at:1361:  $PREPARSER ./input aaaal
 stderr:
 exception caught: yylex
@@ -255169,64 +254773,62 @@
 stderr:
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1361:  $PREPARSER ./input --debug aaaap
-======== Testing with C++ standard flags: ''
-./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab12ab3b40->Object::Object { }
-Next token is token 'a' (0xaaab12ab3b40 'a')
-Shifting token 'a' (0xaaab12ab3b40 'a')
+0xaaaafe25bb40->Object::Object { }
+Next token is token 'a' (0xaaaafe25bb40 'a')
+Shifting token 'a' (0xaaaafe25bb40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab12ab3b40 'a')
--> $$ = nterm item (0xaaab12ab3b40 'a')
+   $1 = token 'a' (0xaaaafe25bb40 'a')
+-> $$ = nterm item (0xaaaafe25bb40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaab12ab3b90->Object::Object { 0xaaab12ab3b40 }
-Next token is token 'a' (0xaaab12ab3b90 'a')
-Shifting token 'a' (0xaaab12ab3b90 'a')
+0xaaaafe25bb90->Object::Object { 0xaaaafe25bb40 }
+Next token is token 'a' (0xaaaafe25bb90 'a')
+Shifting token 'a' (0xaaaafe25bb90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab12ab3b90 'a')
--> $$ = nterm item (0xaaab12ab3b90 'a')
+   $1 = token 'a' (0xaaaafe25bb90 'a')
+-> $$ = nterm item (0xaaaafe25bb90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaab12ab3be0->Object::Object { 0xaaab12ab3b40, 0xaaab12ab3b90 }
-Next token is token 'a' (0xaaab12ab3be0 'a')
-Shifting token 'a' (0xaaab12ab3be0 'a')
+0xaaaafe25bbe0->Object::Object { 0xaaaafe25bb40, 0xaaaafe25bb90 }
+Next token is token 'a' (0xaaaafe25bbe0 'a')
+Shifting token 'a' (0xaaaafe25bbe0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab12ab3be0 'a')
--> $$ = nterm item (0xaaab12ab3be0 'a')
+   $1 = token 'a' (0xaaaafe25bbe0 'a')
+-> $$ = nterm item (0xaaaafe25bbe0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaab12ab3c30->Object::Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0 }
-Next token is token 'a' (0xaaab12ab3c30 'a')
-Shifting token 'a' (0xaaab12ab3c30 'a')
+0xaaaafe25bc30->Object::Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0 }
+Next token is token 'a' (0xaaaafe25bc30 'a')
+Shifting token 'a' (0xaaaafe25bc30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab12ab3c30 'a')
--> $$ = nterm item (0xaaab12ab3c30 'a')
+   $1 = token 'a' (0xaaaafe25bc30 'a')
+-> $$ = nterm item (0xaaaafe25bc30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaab12ab3c80->Object::Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0, 0xaaab12ab3c30 }
-Next token is token 'p' (0xaaab12ab3c80 'p'Exception caught: cleaning lookahead and stack
-0xaaab12ab3c80->Object::~Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0, 0xaaab12ab3c30, 0xaaab12ab3c80 }
-0xaaab12ab3c30->Object::~Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0, 0xaaab12ab3c30 }
-0xaaab12ab3be0->Object::~Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0 }
-0xaaab12ab3b90->Object::~Object { 0xaaab12ab3b40, 0xaaab12ab3b90 }
-0xaaab12ab3b40->Object::~Object { 0xaaab12ab3b40 }
+0xaaaafe25bc80->Object::Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0, 0xaaaafe25bc30 }
+Next token is token 'p' (0xaaaafe25bc80 'p'Exception caught: cleaning lookahead and stack
+0xaaaafe25bc80->Object::~Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0, 0xaaaafe25bc30, 0xaaaafe25bc80 }
+0xaaaafe25bc30->Object::~Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0, 0xaaaafe25bc30 }
+0xaaaafe25bbe0->Object::~Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0 }
+0xaaaafe25bb90->Object::~Object { 0xaaaafe25bb40, 0xaaaafe25bb90 }
+0xaaaafe25bb40->Object::~Object { 0xaaaafe25bb40 }
 exception caught: printer
 end { }
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -255235,57 +254837,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab12ab3b40->Object::Object { }
-Next token is token 'a' (0xaaab12ab3b40 'a')
-Shifting token 'a' (0xaaab12ab3b40 'a')
+0xaaaafe25bb40->Object::Object { }
+Next token is token 'a' (0xaaaafe25bb40 'a')
+Shifting token 'a' (0xaaaafe25bb40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab12ab3b40 'a')
--> $$ = nterm item (0xaaab12ab3b40 'a')
+   $1 = token 'a' (0xaaaafe25bb40 'a')
+-> $$ = nterm item (0xaaaafe25bb40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaab12ab3b90->Object::Object { 0xaaab12ab3b40 }
-Next token is token 'a' (0xaaab12ab3b90 'a')
-Shifting token 'a' (0xaaab12ab3b90 'a')
+0xaaaafe25bb90->Object::Object { 0xaaaafe25bb40 }
+Next token is token 'a' (0xaaaafe25bb90 'a')
+Shifting token 'a' (0xaaaafe25bb90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab12ab3b90 'a')
--> $$ = nterm item (0xaaab12ab3b90 'a')
+   $1 = token 'a' (0xaaaafe25bb90 'a')
+-> $$ = nterm item (0xaaaafe25bb90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaab12ab3be0->Object::Object { 0xaaab12ab3b40, 0xaaab12ab3b90 }
-Next token is token 'a' (0xaaab12ab3be0 'a')
-Shifting token 'a' (0xaaab12ab3be0 'a')
+0xaaaafe25bbe0->Object::Object { 0xaaaafe25bb40, 0xaaaafe25bb90 }
+Next token is token 'a' (0xaaaafe25bbe0 'a')
+Shifting token 'a' (0xaaaafe25bbe0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab12ab3be0 'a')
--> $$ = nterm item (0xaaab12ab3be0 'a')
+   $1 = token 'a' (0xaaaafe25bbe0 'a')
+-> $$ = nterm item (0xaaaafe25bbe0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaab12ab3c30->Object::Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0 }
-Next token is token 'a' (0xaaab12ab3c30 'a')
-Shifting token 'a' (0xaaab12ab3c30 'a')
+0xaaaafe25bc30->Object::Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0 }
+Next token is token 'a' (0xaaaafe25bc30 'a')
+Shifting token 'a' (0xaaaafe25bc30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab12ab3c30 'a')
--> $$ = nterm item (0xaaab12ab3c30 'a')
+   $1 = token 'a' (0xaaaafe25bc30 'a')
+-> $$ = nterm item (0xaaaafe25bc30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaab12ab3c80->Object::Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0, 0xaaab12ab3c30 }
-Next token is token 'p' (0xaaab12ab3c80 'p'Exception caught: cleaning lookahead and stack
-0xaaab12ab3c80->Object::~Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0, 0xaaab12ab3c30, 0xaaab12ab3c80 }
-0xaaab12ab3c30->Object::~Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0, 0xaaab12ab3c30 }
-0xaaab12ab3be0->Object::~Object { 0xaaab12ab3b40, 0xaaab12ab3b90, 0xaaab12ab3be0 }
-0xaaab12ab3b90->Object::~Object { 0xaaab12ab3b40, 0xaaab12ab3b90 }
-0xaaab12ab3b40->Object::~Object { 0xaaab12ab3b40 }
+0xaaaafe25bc80->Object::Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0, 0xaaaafe25bc30 }
+Next token is token 'p' (0xaaaafe25bc80 'p'Exception caught: cleaning lookahead and stack
+0xaaaafe25bc80->Object::~Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0, 0xaaaafe25bc30, 0xaaaafe25bc80 }
+0xaaaafe25bc30->Object::~Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0, 0xaaaafe25bc30 }
+0xaaaafe25bbe0->Object::~Object { 0xaaaafe25bb40, 0xaaaafe25bb90, 0xaaaafe25bbe0 }
+0xaaaafe25bb90->Object::~Object { 0xaaaafe25bb40, 0xaaaafe25bb90 }
+0xaaaafe25bb40->Object::~Object { 0xaaaafe25bb40 }
 exception caught: printer
 end { }
 ./c++.at:1361: grep '^exception caught: printer$' stderr
@@ -255295,6 +254897,8 @@
 stderr:
 exception caught: syntax error
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+689. c++.at:1371: testing C++ GLR parser identifier shadowing ...
+./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
 ./c++.at:1361:  $PREPARSER ./input aaaaE
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
@@ -255306,16 +254910,358 @@
 stderr:
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
+======== Testing with C++ standard flags: ''
 ./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:1363: ./exceptions || exit 77
+./c++.at:858:  $PREPARSER ./input
 stderr:
-Inner caught
-Outer caught
-./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc --report=all input.yy
+./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:1066:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1066:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1066:  $PREPARSER ./input < in
+stderr:
+error: invalid character
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./c++.at:1066: ./check
+./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+stderr:
+stdout:
+======== Testing with C++ standard flags: ''
+./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:1362:  $PREPARSER ./input aaaas
+stderr:
+exception caught: reduction
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaal
+stderr:
+exception caught: yylex
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input i
+stderr:
+exception caught: initial-action
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaap
+stderr:
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input --debug aaaap
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xffffdec66e60->Object::Object { }
+0xffffdec66f30->Object::Object { 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'a' (0xffffdec66f30 'a')
+0xffffdec66e70->Object::Object { 0xffffdec66f30 }
+0xffffdec66e20->Object::Object { 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66e20->Object::~Object { 0xffffdec66e20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xffffdec66e70, 0xffffdec66f30 }
+Shifting token 'a' (0xffffdec66e70 'a')
+0xaaab09f0fee0->Object::Object { 0xffffdec66e70 }
+0xffffdec66df8->Object::Object { 0xaaab09f0fee0, 0xffffdec66e70 }
+0xffffdec66df8->Object::~Object { 0xaaab09f0fee0, 0xffffdec66df8, 0xffffdec66e70 }
+0xffffdec66e70->Object::~Object { 0xaaab09f0fee0, 0xffffdec66e70 }
+Entering state 2
+Stack now 0 2
+0xffffdec66f50->Object::Object { 0xaaab09f0fee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaab09f0fee0 'a')
+-> $$ = nterm item (0xffffdec66f50 'a')
+0xaaab09f0fee0->Object::~Object { 0xaaab09f0fee0, 0xffffdec66f50 }
+0xaaab09f0fee0->Object::Object { 0xffffdec66f50 }
+0xffffdec66f00->Object::Object { 0xaaab09f0fee0, 0xffffdec66f50 }
+0xffffdec66f00->Object::~Object { 0xaaab09f0fee0, 0xffffdec66f00, 0xffffdec66f50 }
+0xffffdec66f50->Object::~Object { 0xaaab09f0fee0, 0xffffdec66f50 }
+Entering state 11
+Stack now 0 11
+Reading a token
+0xffffdec66e60->Object::Object { 0xaaab09f0fee0 }
+0xffffdec66f30->Object::Object { 0xaaab09f0fee0, 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xaaab09f0fee0, 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'a' (0xffffdec66f30 'a')
+0xffffdec66e70->Object::Object { 0xaaab09f0fee0, 0xffffdec66f30 }
+0xffffdec66e20->Object::Object { 0xaaab09f0fee0, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66e20->Object::~Object { 0xaaab09f0fee0, 0xffffdec66e20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xaaab09f0fee0, 0xffffdec66e70, 0xffffdec66f30 }
+Shifting token 'a' (0xffffdec66e70 'a')
+0xaaab09f0ff00->Object::Object { 0xaaab09f0fee0, 0xffffdec66e70 }
+0xffffdec66df8->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70 }
+0xffffdec66df8->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66df8, 0xffffdec66e70 }
+0xffffdec66e70->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70 }
+Entering state 2
+Stack now 0 11 2
+0xffffdec66f50->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaab09f0ff00 'a')
+-> $$ = nterm item (0xffffdec66f50 'a')
+0xaaab09f0ff00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f50 }
+0xaaab09f0ff00->Object::Object { 0xaaab09f0fee0, 0xffffdec66f50 }
+0xffffdec66f00->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f50 }
+0xffffdec66f00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f00, 0xffffdec66f50 }
+0xffffdec66f50->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f50 }
+Entering state 11
+Stack now 0 11 11
+Reading a token
+0xffffdec66e60->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00 }
+0xffffdec66f30->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'a' (0xffffdec66f30 'a')
+0xffffdec66e70->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f30 }
+0xffffdec66e20->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66e20->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70, 0xffffdec66f30 }
+Shifting token 'a' (0xffffdec66e70 'a')
+0xaaab09f0ff20->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70 }
+0xffffdec66df8->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70 }
+0xffffdec66df8->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66df8, 0xffffdec66e70 }
+0xffffdec66e70->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70 }
+Entering state 2
+Stack now 0 11 11 2
+0xffffdec66f50->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaab09f0ff20 'a')
+-> $$ = nterm item (0xffffdec66f50 'a')
+0xaaab09f0ff20->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f50 }
+0xaaab09f0ff20->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f50 }
+0xffffdec66f00->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f50 }
+0xffffdec66f00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f00, 0xffffdec66f50 }
+0xffffdec66f50->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f50 }
+Entering state 11
+Stack now 0 11 11 11
+Reading a token
+0xffffdec66e60->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20 }
+0xffffdec66f30->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'a' (0xffffdec66f30 'a')
+0xffffdec66e70->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f30 }
+0xffffdec66e20->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66e20->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70, 0xffffdec66f30 }
+Shifting token 'a' (0xffffdec66e70 'a')
+0xaaab09f0ff40->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70 }
+0xffffdec66df8->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66e70 }
+0xffffdec66df8->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66df8, 0xffffdec66e70 }
+0xffffdec66e70->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66e70 }
+Entering state 2
+Stack now 0 11 11 11 2
+0xffffdec66f50->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaab09f0ff40 'a')
+-> $$ = nterm item (0xffffdec66f50 'a')
+0xaaab09f0ff40->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f50 }
+0xaaab09f0ff40->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f50 }
+0xffffdec66f00->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f50 }
+0xffffdec66f00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f00, 0xffffdec66f50 }
+0xffffdec66f50->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f50 }
+Entering state 11
+Stack now 0 11 11 11 11
+Reading a token
+0xffffdec66e60->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40 }
+0xffffdec66f30->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'p' (0xffffdec66f30 'p'Exception caught: cleaning lookahead and stack
+0xaaab09f0ff40->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f30 }
+0xaaab09f0ff20->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f30 }
+0xaaab09f0ff00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f30 }
+0xaaab09f0fee0->Object::~Object { 0xaaab09f0fee0, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xffffdec66f30 }
+exception caught: printer
+end { }
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xffffdec66e60->Object::Object { }
+0xffffdec66f30->Object::Object { 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'a' (0xffffdec66f30 'a')
+0xffffdec66e70->Object::Object { 0xffffdec66f30 }
+0xffffdec66e20->Object::Object { 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66e20->Object::~Object { 0xffffdec66e20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xffffdec66e70, 0xffffdec66f30 }
+Shifting token 'a' (0xffffdec66e70 'a')
+0xaaab09f0fee0->Object::Object { 0xffffdec66e70 }
+0xffffdec66df8->Object::Object { 0xaaab09f0fee0, 0xffffdec66e70 }
+0xffffdec66df8->Object::~Object { 0xaaab09f0fee0, 0xffffdec66df8, 0xffffdec66e70 }
+0xffffdec66e70->Object::~Object { 0xaaab09f0fee0, 0xffffdec66e70 }
+Entering state 2
+Stack now 0 2
+0xffffdec66f50->Object::Object { 0xaaab09f0fee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaab09f0fee0 'a')
+-> $$ = nterm item (0xffffdec66f50 'a')
+0xaaab09f0fee0->Object::~Object { 0xaaab09f0fee0, 0xffffdec66f50 }
+0xaaab09f0fee0->Object::Object { 0xffffdec66f50 }
+0xffffdec66f00->Object::Object { 0xaaab09f0fee0, 0xffffdec66f50 }
+0xffffdec66f00->Object::~Object { 0xaaab09f0fee0, 0xffffdec66f00, 0xffffdec66f50 }
+0xffffdec66f50->Object::~Object { 0xaaab09f0fee0, 0xffffdec66f50 }
+Entering state 11
+Stack now 0 11
+Reading a token
+0xffffdec66e60->Object::Object { 0xaaab09f0fee0 }
+0xffffdec66f30->Object::Object { 0xaaab09f0fee0, 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xaaab09f0fee0, 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'a' (0xffffdec66f30 'a')
+0xffffdec66e70->Object::Object { 0xaaab09f0fee0, 0xffffdec66f30 }
+0xffffdec66e20->Object::Object { 0xaaab09f0fee0, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66e20->Object::~Object { 0xaaab09f0fee0, 0xffffdec66e20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xaaab09f0fee0, 0xffffdec66e70, 0xffffdec66f30 }
+Shifting token 'a' (0xffffdec66e70 'a')
+0xaaab09f0ff00->Object::Object { 0xaaab09f0fee0, 0xffffdec66e70 }
+0xffffdec66df8->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70 }
+0xffffdec66df8->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66df8, 0xffffdec66e70 }
+0xffffdec66e70->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70 }
+Entering state 2
+Stack now 0 11 2
+0xffffdec66f50->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaab09f0ff00 'a')
+-> $$ = nterm item (0xffffdec66f50 'a')
+0xaaab09f0ff00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f50 }
+0xaaab09f0ff00->Object::Object { 0xaaab09f0fee0, 0xffffdec66f50 }
+0xffffdec66f00->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f50 }
+0xffffdec66f00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f00, 0xffffdec66f50 }
+0xffffdec66f50->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f50 }
+Entering state 11
+Stack now 0 11 11
+Reading a token
+0xffffdec66e60->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00 }
+0xffffdec66f30->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'a' (0xffffdec66f30 'a')
+0xffffdec66e70->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f30 }
+0xffffdec66e20->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66e20->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70, 0xffffdec66f30 }
+Shifting token 'a' (0xffffdec66e70 'a')
+0xaaab09f0ff20->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66e70 }
+0xffffdec66df8->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70 }
+0xffffdec66df8->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66df8, 0xffffdec66e70 }
+0xffffdec66e70->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70 }
+Entering state 2
+Stack now 0 11 11 2
+0xffffdec66f50->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaab09f0ff20 'a')
+-> $$ = nterm item (0xffffdec66f50 'a')
+0xaaab09f0ff20->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f50 }
+0xaaab09f0ff20->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f50 }
+0xffffdec66f00->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f50 }
+0xffffdec66f00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f00, 0xffffdec66f50 }
+0xffffdec66f50->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f50 }
+Entering state 11
+Stack now 0 11 11 11
+Reading a token
+0xffffdec66e60->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20 }
+0xffffdec66f30->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'a' (0xffffdec66f30 'a')
+0xffffdec66e70->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f30 }
+0xffffdec66e20->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66e20->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e20, 0xffffdec66e70, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70, 0xffffdec66f30 }
+Shifting token 'a' (0xffffdec66e70 'a')
+0xaaab09f0ff40->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66e70 }
+0xffffdec66df8->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66e70 }
+0xffffdec66df8->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66df8, 0xffffdec66e70 }
+0xffffdec66e70->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66e70 }
+Entering state 2
+Stack now 0 11 11 11 2
+0xffffdec66f50->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaab09f0ff40 'a')
+-> $$ = nterm item (0xffffdec66f50 'a')
+0xaaab09f0ff40->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f50 }
+0xaaab09f0ff40->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f50 }
+0xffffdec66f00->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f50 }
+0xffffdec66f00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f00, 0xffffdec66f50 }
+0xffffdec66f50->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f50 }
+Entering state 11
+Stack now 0 11 11 11 11
+Reading a token
+0xffffdec66e60->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40 }
+0xffffdec66f30->Object::Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66e60 }
+0xffffdec66e60->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66e60, 0xffffdec66f30 }
+Next token is token 'p' (0xffffdec66f30 'p'Exception caught: cleaning lookahead and stack
+0xaaab09f0ff40->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xaaab09f0ff40, 0xffffdec66f30 }
+0xaaab09f0ff20->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xaaab09f0ff20, 0xffffdec66f30 }
+0xaaab09f0ff00->Object::~Object { 0xaaab09f0fee0, 0xaaab09f0ff00, 0xffffdec66f30 }
+0xaaab09f0fee0->Object::~Object { 0xaaab09f0fee0, 0xffffdec66f30 }
+0xffffdec66f30->Object::~Object { 0xffffdec66f30 }
+exception caught: printer
+end { }
+./c++.at:1362: grep '^exception caught: printer$' stderr
+stdout:
+exception caught: printer
+./c++.at:1362:  $PREPARSER ./input aaaae
+stderr:
+exception caught: syntax error
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaaE
+stderr:
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaaT
+stderr:
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaaR
+stderr:
+stderr:
+stdout:
+./c++.at:1065:  $PREPARSER ./input < in
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1065:  $PREPARSER ./input < in
+stderr:
+error: invalid character
+./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+683. c++.at:1065:  ok
+
+690. c++.at:1422: testing Shared locations ...
+./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o x1.cc x1.yy
+./c++.at:1456: $CXX $CPPFLAGS  $CXXFLAGS -Iinclude -c -o x1.o x1.cc 
+stderr:
+stdout:
+./c++.at:858:  $PREPARSER ./input
+stderr:
+./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1360:  $PREPARSER ./input aaaas
@@ -255339,57 +255285,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaae6627b40->Object::Object { }
-Next token is token 'a' (0xaaaae6627b40 'a')
-Shifting token 'a' (0xaaaae6627b40 'a')
+0xaaaae34c5b40->Object::Object { }
+Next token is token 'a' (0xaaaae34c5b40 'a')
+Shifting token 'a' (0xaaaae34c5b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaae6627b40 'a')
--> $$ = nterm item (0xaaaae6627b40 'a')
+   $1 = token 'a' (0xaaaae34c5b40 'a')
+-> $$ = nterm item (0xaaaae34c5b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaae6627b90->Object::Object { 0xaaaae6627b40 }
-Next token is token 'a' (0xaaaae6627b90 'a')
-Shifting token 'a' (0xaaaae6627b90 'a')
+0xaaaae34c5b90->Object::Object { 0xaaaae34c5b40 }
+Next token is token 'a' (0xaaaae34c5b90 'a')
+Shifting token 'a' (0xaaaae34c5b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaae6627b90 'a')
--> $$ = nterm item (0xaaaae6627b90 'a')
+   $1 = token 'a' (0xaaaae34c5b90 'a')
+-> $$ = nterm item (0xaaaae34c5b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaae6627be0->Object::Object { 0xaaaae6627b40, 0xaaaae6627b90 }
-Next token is token 'a' (0xaaaae6627be0 'a')
-Shifting token 'a' (0xaaaae6627be0 'a')
+0xaaaae34c5be0->Object::Object { 0xaaaae34c5b40, 0xaaaae34c5b90 }
+Next token is token 'a' (0xaaaae34c5be0 'a')
+Shifting token 'a' (0xaaaae34c5be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaae6627be0 'a')
--> $$ = nterm item (0xaaaae6627be0 'a')
+   $1 = token 'a' (0xaaaae34c5be0 'a')
+-> $$ = nterm item (0xaaaae34c5be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaae6627c30->Object::Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0 }
-Next token is token 'a' (0xaaaae6627c30 'a')
-Shifting token 'a' (0xaaaae6627c30 'a')
+0xaaaae34c5c30->Object::Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0 }
+Next token is token 'a' (0xaaaae34c5c30 'a')
+Shifting token 'a' (0xaaaae34c5c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaae6627c30 'a')
--> $$ = nterm item (0xaaaae6627c30 'a')
+   $1 = token 'a' (0xaaaae34c5c30 'a')
+-> $$ = nterm item (0xaaaae34c5c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaae6627c80->Object::Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0, 0xaaaae6627c30 }
-Next token is token 'p' (0xaaaae6627c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaae6627c80->Object::~Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0, 0xaaaae6627c30, 0xaaaae6627c80 }
-0xaaaae6627c30->Object::~Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0, 0xaaaae6627c30 }
-0xaaaae6627be0->Object::~Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0 }
-0xaaaae6627b90->Object::~Object { 0xaaaae6627b40, 0xaaaae6627b90 }
-0xaaaae6627b40->Object::~Object { 0xaaaae6627b40 }
+0xaaaae34c5c80->Object::Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0, 0xaaaae34c5c30 }
+Next token is token 'p' (0xaaaae34c5c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaae34c5c80->Object::~Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0, 0xaaaae34c5c30, 0xaaaae34c5c80 }
+0xaaaae34c5c30->Object::~Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0, 0xaaaae34c5c30 }
+0xaaaae34c5be0->Object::~Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0 }
+0xaaaae34c5b90->Object::~Object { 0xaaaae34c5b40, 0xaaaae34c5b90 }
+0xaaaae34c5b40->Object::~Object { 0xaaaae34c5b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -255398,57 +255344,57 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaae6627b40->Object::Object { }
-Next token is token 'a' (0xaaaae6627b40 'a')
-Shifting token 'a' (0xaaaae6627b40 'a')
+0xaaaae34c5b40->Object::Object { }
+Next token is token 'a' (0xaaaae34c5b40 'a')
+Shifting token 'a' (0xaaaae34c5b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaae6627b40 'a')
--> $$ = nterm item (0xaaaae6627b40 'a')
+   $1 = token 'a' (0xaaaae34c5b40 'a')
+-> $$ = nterm item (0xaaaae34c5b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaaae6627b90->Object::Object { 0xaaaae6627b40 }
-Next token is token 'a' (0xaaaae6627b90 'a')
-Shifting token 'a' (0xaaaae6627b90 'a')
+0xaaaae34c5b90->Object::Object { 0xaaaae34c5b40 }
+Next token is token 'a' (0xaaaae34c5b90 'a')
+Shifting token 'a' (0xaaaae34c5b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaae6627b90 'a')
--> $$ = nterm item (0xaaaae6627b90 'a')
+   $1 = token 'a' (0xaaaae34c5b90 'a')
+-> $$ = nterm item (0xaaaae34c5b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaaae6627be0->Object::Object { 0xaaaae6627b40, 0xaaaae6627b90 }
-Next token is token 'a' (0xaaaae6627be0 'a')
-Shifting token 'a' (0xaaaae6627be0 'a')
+0xaaaae34c5be0->Object::Object { 0xaaaae34c5b40, 0xaaaae34c5b90 }
+Next token is token 'a' (0xaaaae34c5be0 'a')
+Shifting token 'a' (0xaaaae34c5be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaae6627be0 'a')
--> $$ = nterm item (0xaaaae6627be0 'a')
+   $1 = token 'a' (0xaaaae34c5be0 'a')
+-> $$ = nterm item (0xaaaae34c5be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaaae6627c30->Object::Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0 }
-Next token is token 'a' (0xaaaae6627c30 'a')
-Shifting token 'a' (0xaaaae6627c30 'a')
+0xaaaae34c5c30->Object::Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0 }
+Next token is token 'a' (0xaaaae34c5c30 'a')
+Shifting token 'a' (0xaaaae34c5c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaae6627c30 'a')
--> $$ = nterm item (0xaaaae6627c30 'a')
+   $1 = token 'a' (0xaaaae34c5c30 'a')
+-> $$ = nterm item (0xaaaae34c5c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaaae6627c80->Object::Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0, 0xaaaae6627c30 }
-Next token is token 'p' (0xaaaae6627c80 'p'Exception caught: cleaning lookahead and stack
-0xaaaae6627c80->Object::~Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0, 0xaaaae6627c30, 0xaaaae6627c80 }
-0xaaaae6627c30->Object::~Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0, 0xaaaae6627c30 }
-0xaaaae6627be0->Object::~Object { 0xaaaae6627b40, 0xaaaae6627b90, 0xaaaae6627be0 }
-0xaaaae6627b90->Object::~Object { 0xaaaae6627b40, 0xaaaae6627b90 }
-0xaaaae6627b40->Object::~Object { 0xaaaae6627b40 }
+0xaaaae34c5c80->Object::Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0, 0xaaaae34c5c30 }
+Next token is token 'p' (0xaaaae34c5c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaae34c5c80->Object::~Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0, 0xaaaae34c5c30, 0xaaaae34c5c80 }
+0xaaaae34c5c30->Object::~Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0, 0xaaaae34c5c30 }
+0xaaaae34c5be0->Object::~Object { 0xaaaae34c5b40, 0xaaaae34c5b90, 0xaaaae34c5be0 }
+0xaaaae34c5b90->Object::~Object { 0xaaaae34c5b40, 0xaaaae34c5b90 }
+0xaaaae34c5b40->Object::~Object { 0xaaaae34c5b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: grep '^exception caught: printer$' stderr
@@ -255472,209 +255418,388 @@
 ./c++.at:1360: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:858:  $PREPARSER ./input
-stderr:
-./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1065:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
+./c++.at:1363:  $PREPARSER ./input aaaas
 stderr:
-error: invalid expression
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1065:  $PREPARSER ./input < in
+exception caught: reduction
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaal
 stderr:
-error: invalid character
-./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-683. c++.at:1065:  ok
-
+exception caught: yylex
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stdout:
+./c++.at:1363:  $PREPARSER ./input i
 ======== Testing with C++ standard flags: ''
 ./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-690. c++.at:1422: testing Shared locations ...
-./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o x1.cc x1.yy
-./c++.at:1456: $CXX $CPPFLAGS  $CXXFLAGS -Iinclude -c -o x1.o x1.cc 
-stderr:
-stdout:
-./c++.at:858:  $PREPARSER ./input
-stderr:
-./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid character
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
-stdout:
-./c++.at:1066: ./check
-./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
 stderr:
 stdout:
+exception caught: initial-action
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1361:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaap
+stderr:
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1361:  $PREPARSER ./input aaaal
+./c++.at:1363:  $PREPARSER ./input --debug aaaap
+stderr:
 stderr:
 exception caught: yylex
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xffffce5b0ef0->Object::Object { }
+0xffffce5b0fe0->Object::Object { 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'a' (0xffffce5b0fe0 'a')
+0xffffce5b0f00->Object::Object { 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xffffce5b0f00, 0xffffce5b0fe0 }
+Shifting token 'a' (0xffffce5b0f00 'a')
+0xaaaaee12dee0->Object::Object { 0xffffce5b0f00 }
+0xffffce5b0f00->Object::~Object { 0xaaaaee12dee0, 0xffffce5b0f00 }
+Entering state 1
+Stack now 0 1
+0xffffce5b1000->Object::Object { 0xaaaaee12dee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaee12dee0 'a')
+-> $$ = nterm item (0xffffce5b1000 'a')
+0xaaaaee12dee0->Object::~Object { 0xaaaaee12dee0, 0xffffce5b1000 }
+0xaaaaee12dee0->Object::Object { 0xffffce5b1000 }
+0xffffce5b1000->Object::~Object { 0xaaaaee12dee0, 0xffffce5b1000 }
+Entering state 10
+Stack now 0 10
+Reading a token
+0xffffce5b0ef0->Object::Object { 0xaaaaee12dee0 }
+0xffffce5b0fe0->Object::Object { 0xaaaaee12dee0, 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xaaaaee12dee0, 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'a' (0xffffce5b0fe0 'a')
+0xffffce5b0f00->Object::Object { 0xaaaaee12dee0, 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xaaaaee12dee0, 0xffffce5b0f00, 0xffffce5b0fe0 }
+Shifting token 'a' (0xffffce5b0f00 'a')
+0xaaaaee12df00->Object::Object { 0xaaaaee12dee0, 0xffffce5b0f00 }
+0xffffce5b0f00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0f00 }
+Entering state 1
+Stack now 0 10 1
+0xffffce5b1000->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaee12df00 'a')
+-> $$ = nterm item (0xffffce5b1000 'a')
+0xaaaaee12df00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b1000 }
+0xaaaaee12df00->Object::Object { 0xaaaaee12dee0, 0xffffce5b1000 }
+0xffffce5b1000->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b1000 }
+Entering state 10
+Stack now 0 10 10
+Reading a token
+0xffffce5b0ef0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00 }
+0xffffce5b0fe0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'a' (0xffffce5b0fe0 'a')
+0xffffce5b0f00->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0f00, 0xffffce5b0fe0 }
+Shifting token 'a' (0xffffce5b0f00 'a')
+0xaaaaee12df20->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0f00 }
+0xffffce5b0f00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0f00 }
+Entering state 1
+Stack now 0 10 10 1
+0xffffce5b1000->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaee12df20 'a')
+-> $$ = nterm item (0xffffce5b1000 'a')
+0xaaaaee12df20->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b1000 }
+0xaaaaee12df20->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b1000 }
+0xffffce5b1000->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b1000 }
+Entering state 10
+Stack now 0 10 10 10
+Reading a token
+0xffffce5b0ef0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20 }
+0xffffce5b0fe0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'a' (0xffffce5b0fe0 'a')
+0xffffce5b0f00->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0f00, 0xffffce5b0fe0 }
+Shifting token 'a' (0xffffce5b0f00 'a')
+0xaaaaee12df40->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0f00 }
+0xffffce5b0f00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b0f00 }
+Entering state 1
+Stack now 0 10 10 10 1
+0xffffce5b1000->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaee12df40 'a')
+-> $$ = nterm item (0xffffce5b1000 'a')
+0xaaaaee12df40->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b1000 }
+0xaaaaee12df40->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b1000 }
+0xffffce5b1000->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b1000 }
+Entering state 10
+Stack now 0 10 10 10 10
+Reading a token
+0xffffce5b0ef0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40 }
+0xffffce5b0fe0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'p' (0xffffce5b0fe0 'p'Exception caught: cleaning lookahead and stack
+0xaaaaee12df40->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b0fe0 }
+0xaaaaee12df20->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0fe0 }
+0xaaaaee12df00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0fe0 }
+0xaaaaee12dee0->Object::~Object { 0xaaaaee12dee0, 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xffffce5b0fe0 }
+exception caught: printer
+end { }
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./c++.at:1361:  $PREPARSER ./input i
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xffffce5b0ef0->Object::Object { }
+0xffffce5b0fe0->Object::Object { 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'a' (0xffffce5b0fe0 'a')
+0xffffce5b0f00->Object::Object { 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xffffce5b0f00, 0xffffce5b0fe0 }
+Shifting token 'a' (0xffffce5b0f00 'a')
+0xaaaaee12dee0->Object::Object { 0xffffce5b0f00 }
+0xffffce5b0f00->Object::~Object { 0xaaaaee12dee0, 0xffffce5b0f00 }
+Entering state 1
+Stack now 0 1
+0xffffce5b1000->Object::Object { 0xaaaaee12dee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaee12dee0 'a')
+-> $$ = nterm item (0xffffce5b1000 'a')
+0xaaaaee12dee0->Object::~Object { 0xaaaaee12dee0, 0xffffce5b1000 }
+0xaaaaee12dee0->Object::Object { 0xffffce5b1000 }
+0xffffce5b1000->Object::~Object { 0xaaaaee12dee0, 0xffffce5b1000 }
+Entering state 10
+Stack now 0 10
+Reading a token
+0xffffce5b0ef0->Object::Object { 0xaaaaee12dee0 }
+0xffffce5b0fe0->Object::Object { 0xaaaaee12dee0, 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xaaaaee12dee0, 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'a' (0xffffce5b0fe0 'a')
+0xffffce5b0f00->Object::Object { 0xaaaaee12dee0, 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xaaaaee12dee0, 0xffffce5b0f00, 0xffffce5b0fe0 }
+Shifting token 'a' (0xffffce5b0f00 'a')
+0xaaaaee12df00->Object::Object { 0xaaaaee12dee0, 0xffffce5b0f00 }
+0xffffce5b0f00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0f00 }
+Entering state 1
+Stack now 0 10 1
+0xffffce5b1000->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaee12df00 'a')
+-> $$ = nterm item (0xffffce5b1000 'a')
+0xaaaaee12df00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b1000 }
+0xaaaaee12df00->Object::Object { 0xaaaaee12dee0, 0xffffce5b1000 }
+0xffffce5b1000->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b1000 }
+Entering state 10
+Stack now 0 10 10
+Reading a token
+0xffffce5b0ef0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00 }
+0xffffce5b0fe0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'a' (0xffffce5b0fe0 'a')
+0xffffce5b0f00->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0f00, 0xffffce5b0fe0 }
+Shifting token 'a' (0xffffce5b0f00 'a')
+0xaaaaee12df20->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0f00 }
+0xffffce5b0f00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0f00 }
+Entering state 1
+Stack now 0 10 10 1
+0xffffce5b1000->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaee12df20 'a')
+-> $$ = nterm item (0xffffce5b1000 'a')
+0xaaaaee12df20->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b1000 }
+0xaaaaee12df20->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b1000 }
+0xffffce5b1000->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b1000 }
+Entering state 10
+Stack now 0 10 10 10
+Reading a token
+0xffffce5b0ef0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20 }
+0xffffce5b0fe0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'a' (0xffffce5b0fe0 'a')
+0xffffce5b0f00->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0f00, 0xffffce5b0fe0 }
+Shifting token 'a' (0xffffce5b0f00 'a')
+0xaaaaee12df40->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0f00 }
+0xffffce5b0f00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b0f00 }
+Entering state 1
+Stack now 0 10 10 10 1
+0xffffce5b1000->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaee12df40 'a')
+-> $$ = nterm item (0xffffce5b1000 'a')
+0xaaaaee12df40->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b1000 }
+0xaaaaee12df40->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b1000 }
+0xffffce5b1000->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b1000 }
+Entering state 10
+Stack now 0 10 10 10 10
+Reading a token
+0xffffce5b0ef0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40 }
+0xffffce5b0fe0->Object::Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b0ef0 }
+0xffffce5b0ef0->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b0ef0, 0xffffce5b0fe0 }
+Next token is token 'p' (0xffffce5b0fe0 'p'Exception caught: cleaning lookahead and stack
+0xaaaaee12df40->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xaaaaee12df40, 0xffffce5b0fe0 }
+0xaaaaee12df20->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xaaaaee12df20, 0xffffce5b0fe0 }
+0xaaaaee12df00->Object::~Object { 0xaaaaee12dee0, 0xaaaaee12df00, 0xffffce5b0fe0 }
+0xaaaaee12dee0->Object::~Object { 0xaaaaee12dee0, 0xffffce5b0fe0 }
+0xffffce5b0fe0->Object::~Object { 0xffffce5b0fe0 }
+exception caught: printer
+end { }
+./c++.at:1363: grep '^exception caught: printer$' stderr
+stdout:
 stderr:
+exception caught: printer
+./c++.at:1363:  $PREPARSER ./input aaaae
 exception caught: initial-action
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+exception caught: syntax error
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1361:  $PREPARSER ./input aaaap
 stderr:
+./c++.at:1363:  $PREPARSER ./input aaaaE
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaT
 ./c++.at:1361:  $PREPARSER ./input --debug aaaap
 stderr:
+stderr:
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab0436db40->Object::Object { }
-Next token is token 'a' (0xaaab0436db40 'a')
-Shifting token 'a' (0xaaab0436db40 'a')
+0xaaaaeb156b40->Object::Object { }
+Next token is token 'a' (0xaaaaeb156b40 'a')
+Shifting token 'a' (0xaaaaeb156b40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab0436db40 'a')
--> $$ = nterm item (0xaaab0436db40 'a')
+   $1 = token 'a' (0xaaaaeb156b40 'a')
+-> $$ = nterm item (0xaaaaeb156b40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaab0436db90->Object::Object { 0xaaab0436db40 }
-Next token is token 'a' (0xaaab0436db90 'a')
-Shifting token 'a' (0xaaab0436db90 'a')
+0xaaaaeb156b90->Object::Object { 0xaaaaeb156b40 }
+Next token is token 'a' (0xaaaaeb156b90 'a')
+Shifting token 'a' (0xaaaaeb156b90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab0436db90 'a')
--> $$ = nterm item (0xaaab0436db90 'a')
+   $1 = token 'a' (0xaaaaeb156b90 'a')
+-> $$ = nterm item (0xaaaaeb156b90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaab0436dbe0->Object::Object { 0xaaab0436db40, 0xaaab0436db90 }
-Next token is token 'a' (0xaaab0436dbe0 'a')
-Shifting token 'a' (0xaaab0436dbe0 'a')
+0xaaaaeb156be0->Object::Object { 0xaaaaeb156b40, 0xaaaaeb156b90 }
+Next token is token 'a' (0xaaaaeb156be0 'a')
+Shifting token 'a' (0xaaaaeb156be0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab0436dbe0 'a')
--> $$ = nterm item (0xaaab0436dbe0 'a')
+   $1 = token 'a' (0xaaaaeb156be0 'a')
+-> $$ = nterm item (0xaaaaeb156be0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaab0436dc30->Object::Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0 }
-Next token is token 'a' (0xaaab0436dc30 'a')
-Shifting token 'a' (0xaaab0436dc30 'a')
+0xaaaaeb156c30->Object::Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0 }
+Next token is token 'a' (0xaaaaeb156c30 'a')
+Shifting token 'a' (0xaaaaeb156c30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab0436dc30 'a')
--> $$ = nterm item (0xaaab0436dc30 'a')
+   $1 = token 'a' (0xaaaaeb156c30 'a')
+-> $$ = nterm item (0xaaaaeb156c30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaab0436dc80->Object::Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0, 0xaaab0436dc30 }
-Next token is token 'p' (0xaaab0436dc80 'p'Exception caught: cleaning lookahead and stack
-0xaaab0436dc80->Object::~Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0, 0xaaab0436dc30, 0xaaab0436dc80 }
-0xaaab0436dc30->Object::~Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0, 0xaaab0436dc30 }
-0xaaab0436dbe0->Object::~Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0 }
-0xaaab0436db90->Object::~Object { 0xaaab0436db40, 0xaaab0436db90 }
-0xaaab0436db40->Object::~Object { 0xaaab0436db40 }
+0xaaaaeb156c80->Object::Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0, 0xaaaaeb156c30 }
+Next token is token 'p' (0xaaaaeb156c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaaeb156c80->Object::~Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0, 0xaaaaeb156c30, 0xaaaaeb156c80 }
+0xaaaaeb156c30->Object::~Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0, 0xaaaaeb156c30 }
+0xaaaaeb156be0->Object::~Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0 }
+0xaaaaeb156b90->Object::~Object { 0xaaaaeb156b40, 0xaaaaeb156b90 }
+0xaaaaeb156b40->Object::~Object { 0xaaaaeb156b40 }
 exception caught: printer
 end { }
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+./c++.at:1363:  $PREPARSER ./input aaaaR
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab0436db40->Object::Object { }
-Next token is token 'a' (0xaaab0436db40 'a')
-Shifting token 'a' (0xaaab0436db40 'a')
+0xaaaaeb156b40->Object::Object { }
+Next token is token 'a' (0xaaaaeb156b40 'a')
+Shifting token 'a' (0xaaaaeb156b40 'a')
 Entering state 1
 Stack now 0 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab0436db40 'a')
--> $$ = nterm item (0xaaab0436db40 'a')
+   $1 = token 'a' (0xaaaaeb156b40 'a')
+-> $$ = nterm item (0xaaaaeb156b40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaab0436db90->Object::Object { 0xaaab0436db40 }
-Next token is token 'a' (0xaaab0436db90 'a')
-Shifting token 'a' (0xaaab0436db90 'a')
+0xaaaaeb156b90->Object::Object { 0xaaaaeb156b40 }
+Next token is token 'a' (0xaaaaeb156b90 'a')
+Shifting token 'a' (0xaaaaeb156b90 'a')
 Entering state 1
 Stack now 0 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab0436db90 'a')
--> $$ = nterm item (0xaaab0436db90 'a')
+   $1 = token 'a' (0xaaaaeb156b90 'a')
+-> $$ = nterm item (0xaaaaeb156b90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaab0436dbe0->Object::Object { 0xaaab0436db40, 0xaaab0436db90 }
-Next token is token 'a' (0xaaab0436dbe0 'a')
-Shifting token 'a' (0xaaab0436dbe0 'a')
+0xaaaaeb156be0->Object::Object { 0xaaaaeb156b40, 0xaaaaeb156b90 }
+Next token is token 'a' (0xaaaaeb156be0 'a')
+Shifting token 'a' (0xaaaaeb156be0 'a')
 Entering state 1
 Stack now 0 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab0436dbe0 'a')
--> $$ = nterm item (0xaaab0436dbe0 'a')
+   $1 = token 'a' (0xaaaaeb156be0 'a')
+-> $$ = nterm item (0xaaaaeb156be0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaab0436dc30->Object::Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0 }
-Next token is token 'a' (0xaaab0436dc30 'a')
-Shifting token 'a' (0xaaab0436dc30 'a')
+0xaaaaeb156c30->Object::Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0 }
+Next token is token 'a' (0xaaaaeb156c30 'a')
+Shifting token 'a' (0xaaaaeb156c30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab0436dc30 'a')
--> $$ = nterm item (0xaaab0436dc30 'a')
+   $1 = token 'a' (0xaaaaeb156c30 'a')
+-> $$ = nterm item (0xaaaaeb156c30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaab0436dc80->Object::Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0, 0xaaab0436dc30 }
-Next token is token 'p' (0xaaab0436dc80 'p'Exception caught: cleaning lookahead and stack
-0xaaab0436dc80->Object::~Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0, 0xaaab0436dc30, 0xaaab0436dc80 }
-0xaaab0436dc30->Object::~Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0, 0xaaab0436dc30 }
-0xaaab0436dbe0->Object::~Object { 0xaaab0436db40, 0xaaab0436db90, 0xaaab0436dbe0 }
-0xaaab0436db90->Object::~Object { 0xaaab0436db40, 0xaaab0436db90 }
-0xaaab0436db40->Object::~Object { 0xaaab0436db40 }
+0xaaaaeb156c80->Object::Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0, 0xaaaaeb156c30 }
+Next token is token 'p' (0xaaaaeb156c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaaeb156c80->Object::~Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0, 0xaaaaeb156c30, 0xaaaaeb156c80 }
+0xaaaaeb156c30->Object::~Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0, 0xaaaaeb156c30 }
+0xaaaaeb156be0->Object::~Object { 0xaaaaeb156b40, 0xaaaaeb156b90, 0xaaaaeb156be0 }
+0xaaaaeb156b90->Object::~Object { 0xaaaaeb156b40, 0xaaaaeb156b90 }
+0xaaaaeb156b40->Object::~Object { 0xaaaaeb156b40 }
 exception caught: printer
 end { }
 ./c++.at:1361: grep '^exception caught: printer$' stderr
 stdout:
+stderr:
 exception caught: printer
 ./c++.at:1361:  $PREPARSER ./input aaaae
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 exception caught: syntax error
 ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:1361:  $PREPARSER ./input aaaaE
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
@@ -255689,10 +255814,36 @@
 ./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
+./c++.at:858:  $PREPARSER ./input
+stderr:
+./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh
+./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o x2.cc x2.yy
+./c++.at:1471: $CXX $CPPFLAGS  $CXXFLAGS -Iinclude -c -o x2.o x2.cc 
+stderr:
+stdout:
+======== Testing with C++ standard flags: ''
+./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:858:  $PREPARSER ./input
+stderr:
+./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
 ./c++.at:1362:  $PREPARSER ./input aaaas
 stderr:
+stdout:
+stderr:
 exception caught: reduction
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1501: $CXX $CPPFLAGS  $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS
 ./c++.at:1362:  $PREPARSER ./input aaaal
 stderr:
 exception caught: yylex
@@ -255710,99 +255861,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xfffff6b4aa10->Object::Object { }
-0xfffff6b4aae0->Object::Object { 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'a' (0xfffff6b4aae0 'a')
-0xfffff6b4aa00->Object::Object { 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xfffff6b4aa00, 0xfffff6b4aae0 }
-Shifting token 'a' (0xfffff6b4aa00 'a')
-0xaaaad391cee0->Object::Object { 0xfffff6b4aa00 }
-0xfffff6b4aa00->Object::~Object { 0xaaaad391cee0, 0xfffff6b4aa00 }
+0xfffffbcc7b50->Object::Object { }
+0xfffffbcc7c20->Object::Object { 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'a' (0xfffffbcc7c20 'a')
+0xfffffbcc7b40->Object::Object { 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xfffffbcc7b40, 0xfffffbcc7c20 }
+Shifting token 'a' (0xfffffbcc7b40 'a')
+0xaaaad5e31ee0->Object::Object { 0xfffffbcc7b40 }
+0xfffffbcc7b40->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7b40 }
 Entering state 2
 Stack now 0 2
-0xfffff6b4ab00->Object::Object { 0xaaaad391cee0 }
+0xfffffbcc7c40->Object::Object { 0xaaaad5e31ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad391cee0 'a')
--> $$ = nterm item (0xfffff6b4ab00 'a')
-0xaaaad391cee0->Object::~Object { 0xaaaad391cee0, 0xfffff6b4ab00 }
-0xaaaad391cee0->Object::Object { 0xfffff6b4ab00 }
-0xfffff6b4ab00->Object::~Object { 0xaaaad391cee0, 0xfffff6b4ab00 }
+   $1 = token 'a' (0xaaaad5e31ee0 'a')
+-> $$ = nterm item (0xfffffbcc7c40 'a')
+0xaaaad5e31ee0->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7c40 }
+0xaaaad5e31ee0->Object::Object { 0xfffffbcc7c40 }
+0xfffffbcc7c40->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7c40 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xfffff6b4aa10->Object::Object { 0xaaaad391cee0 }
-0xfffff6b4aae0->Object::Object { 0xaaaad391cee0, 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xaaaad391cee0, 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'a' (0xfffff6b4aae0 'a')
-0xfffff6b4aa00->Object::Object { 0xaaaad391cee0, 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xaaaad391cee0, 0xfffff6b4aa00, 0xfffff6b4aae0 }
-Shifting token 'a' (0xfffff6b4aa00 'a')
-0xaaaad391cf00->Object::Object { 0xaaaad391cee0, 0xfffff6b4aa00 }
-0xfffff6b4aa00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa00 }
+0xfffffbcc7b50->Object::Object { 0xaaaad5e31ee0 }
+0xfffffbcc7c20->Object::Object { 0xaaaad5e31ee0, 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'a' (0xfffffbcc7c20 'a')
+0xfffffbcc7b40->Object::Object { 0xaaaad5e31ee0, 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7b40, 0xfffffbcc7c20 }
+Shifting token 'a' (0xfffffbcc7b40 'a')
+0xaaaad5e31f00->Object::Object { 0xaaaad5e31ee0, 0xfffffbcc7b40 }
+0xfffffbcc7b40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b40 }
 Entering state 2
 Stack now 0 11 2
-0xfffff6b4ab00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00 }
+0xfffffbcc7c40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad391cf00 'a')
--> $$ = nterm item (0xfffff6b4ab00 'a')
-0xaaaad391cf00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4ab00 }
-0xaaaad391cf00->Object::Object { 0xaaaad391cee0, 0xfffff6b4ab00 }
-0xfffff6b4ab00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4ab00 }
+   $1 = token 'a' (0xaaaad5e31f00 'a')
+-> $$ = nterm item (0xfffffbcc7c40 'a')
+0xaaaad5e31f00->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c40 }
+0xaaaad5e31f00->Object::Object { 0xaaaad5e31ee0, 0xfffffbcc7c40 }
+0xfffffbcc7c40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c40 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xfffff6b4aa10->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00 }
-0xfffff6b4aae0->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'a' (0xfffff6b4aae0 'a')
-0xfffff6b4aa00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa00, 0xfffff6b4aae0 }
-Shifting token 'a' (0xfffff6b4aa00 'a')
-0xaaaad391cf20->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa00 }
-0xfffff6b4aa00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa00 }
+0xfffffbcc7b50->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00 }
+0xfffffbcc7c20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'a' (0xfffffbcc7c20 'a')
+0xfffffbcc7b40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b40, 0xfffffbcc7c20 }
+Shifting token 'a' (0xfffffbcc7b40 'a')
+0xaaaad5e31f20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b40 }
+0xfffffbcc7b40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b40 }
 Entering state 2
 Stack now 0 11 11 2
-0xfffff6b4ab00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20 }
+0xfffffbcc7c40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad391cf20 'a')
--> $$ = nterm item (0xfffff6b4ab00 'a')
-0xaaaad391cf20->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4ab00 }
-0xaaaad391cf20->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4ab00 }
-0xfffff6b4ab00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4ab00 }
+   $1 = token 'a' (0xaaaad5e31f20 'a')
+-> $$ = nterm item (0xfffffbcc7c40 'a')
+0xaaaad5e31f20->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c40 }
+0xaaaad5e31f20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c40 }
+0xfffffbcc7c40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c40 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xfffff6b4aa10->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20 }
-0xfffff6b4aae0->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'a' (0xfffff6b4aae0 'a')
-0xfffff6b4aa00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa00, 0xfffff6b4aae0 }
-Shifting token 'a' (0xfffff6b4aa00 'a')
-0xaaaad391cf40->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa00 }
-0xfffff6b4aa00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4aa00 }
+0xfffffbcc7b50->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20 }
+0xfffffbcc7c20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'a' (0xfffffbcc7c20 'a')
+0xfffffbcc7b40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b40, 0xfffffbcc7c20 }
+Shifting token 'a' (0xfffffbcc7b40 'a')
+0xaaaad5e31f40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b40 }
+0xfffffbcc7b40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7b40 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xfffff6b4ab00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40 }
+0xfffffbcc7c40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad391cf40 'a')
--> $$ = nterm item (0xfffff6b4ab00 'a')
-0xaaaad391cf40->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4ab00 }
-0xaaaad391cf40->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4ab00 }
-0xfffff6b4ab00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4ab00 }
+   $1 = token 'a' (0xaaaad5e31f40 'a')
+-> $$ = nterm item (0xfffffbcc7c40 'a')
+0xaaaad5e31f40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7c40 }
+0xaaaad5e31f40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c40 }
+0xfffffbcc7c40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7c40 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xfffff6b4aa10->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40 }
-0xfffff6b4aae0->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'p' (0xfffff6b4aae0 'p'Exception caught: cleaning lookahead and stack
-0xaaaad391cf40->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4aae0 }
-0xaaaad391cf20->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aae0 }
-0xaaaad391cf00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aae0 }
-0xaaaad391cee0->Object::~Object { 0xaaaad391cee0, 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xfffff6b4aae0 }
+0xfffffbcc7b50->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40 }
+0xfffffbcc7c20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'p' (0xfffffbcc7c20 'p'Exception caught: cleaning lookahead and stack
+0xaaaad5e31f40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7c20 }
+0xaaaad5e31f20->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c20 }
+0xaaaad5e31f00->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c20 }
+0xaaaad5e31ee0->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xfffffbcc7c20 }
 exception caught: printer
 end { }
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -255811,99 +255962,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xfffff6b4aa10->Object::Object { }
-0xfffff6b4aae0->Object::Object { 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'a' (0xfffff6b4aae0 'a')
-0xfffff6b4aa00->Object::Object { 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xfffff6b4aa00, 0xfffff6b4aae0 }
-Shifting token 'a' (0xfffff6b4aa00 'a')
-0xaaaad391cee0->Object::Object { 0xfffff6b4aa00 }
-0xfffff6b4aa00->Object::~Object { 0xaaaad391cee0, 0xfffff6b4aa00 }
+0xfffffbcc7b50->Object::Object { }
+0xfffffbcc7c20->Object::Object { 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'a' (0xfffffbcc7c20 'a')
+0xfffffbcc7b40->Object::Object { 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xfffffbcc7b40, 0xfffffbcc7c20 }
+Shifting token 'a' (0xfffffbcc7b40 'a')
+0xaaaad5e31ee0->Object::Object { 0xfffffbcc7b40 }
+0xfffffbcc7b40->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7b40 }
 Entering state 2
 Stack now 0 2
-0xfffff6b4ab00->Object::Object { 0xaaaad391cee0 }
+0xfffffbcc7c40->Object::Object { 0xaaaad5e31ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad391cee0 'a')
--> $$ = nterm item (0xfffff6b4ab00 'a')
-0xaaaad391cee0->Object::~Object { 0xaaaad391cee0, 0xfffff6b4ab00 }
-0xaaaad391cee0->Object::Object { 0xfffff6b4ab00 }
-0xfffff6b4ab00->Object::~Object { 0xaaaad391cee0, 0xfffff6b4ab00 }
+   $1 = token 'a' (0xaaaad5e31ee0 'a')
+-> $$ = nterm item (0xfffffbcc7c40 'a')
+0xaaaad5e31ee0->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7c40 }
+0xaaaad5e31ee0->Object::Object { 0xfffffbcc7c40 }
+0xfffffbcc7c40->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7c40 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xfffff6b4aa10->Object::Object { 0xaaaad391cee0 }
-0xfffff6b4aae0->Object::Object { 0xaaaad391cee0, 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xaaaad391cee0, 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'a' (0xfffff6b4aae0 'a')
-0xfffff6b4aa00->Object::Object { 0xaaaad391cee0, 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xaaaad391cee0, 0xfffff6b4aa00, 0xfffff6b4aae0 }
-Shifting token 'a' (0xfffff6b4aa00 'a')
-0xaaaad391cf00->Object::Object { 0xaaaad391cee0, 0xfffff6b4aa00 }
-0xfffff6b4aa00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa00 }
+0xfffffbcc7b50->Object::Object { 0xaaaad5e31ee0 }
+0xfffffbcc7c20->Object::Object { 0xaaaad5e31ee0, 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'a' (0xfffffbcc7c20 'a')
+0xfffffbcc7b40->Object::Object { 0xaaaad5e31ee0, 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7b40, 0xfffffbcc7c20 }
+Shifting token 'a' (0xfffffbcc7b40 'a')
+0xaaaad5e31f00->Object::Object { 0xaaaad5e31ee0, 0xfffffbcc7b40 }
+0xfffffbcc7b40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b40 }
 Entering state 2
 Stack now 0 11 2
-0xfffff6b4ab00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00 }
+0xfffffbcc7c40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad391cf00 'a')
--> $$ = nterm item (0xfffff6b4ab00 'a')
-0xaaaad391cf00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4ab00 }
-0xaaaad391cf00->Object::Object { 0xaaaad391cee0, 0xfffff6b4ab00 }
-0xfffff6b4ab00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4ab00 }
+   $1 = token 'a' (0xaaaad5e31f00 'a')
+-> $$ = nterm item (0xfffffbcc7c40 'a')
+0xaaaad5e31f00->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c40 }
+0xaaaad5e31f00->Object::Object { 0xaaaad5e31ee0, 0xfffffbcc7c40 }
+0xfffffbcc7c40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c40 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xfffff6b4aa10->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00 }
-0xfffff6b4aae0->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'a' (0xfffff6b4aae0 'a')
-0xfffff6b4aa00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa00, 0xfffff6b4aae0 }
-Shifting token 'a' (0xfffff6b4aa00 'a')
-0xaaaad391cf20->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aa00 }
-0xfffff6b4aa00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa00 }
+0xfffffbcc7b50->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00 }
+0xfffffbcc7c20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'a' (0xfffffbcc7c20 'a')
+0xfffffbcc7b40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b40, 0xfffffbcc7c20 }
+Shifting token 'a' (0xfffffbcc7b40 'a')
+0xaaaad5e31f20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7b40 }
+0xfffffbcc7b40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b40 }
 Entering state 2
 Stack now 0 11 11 2
-0xfffff6b4ab00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20 }
+0xfffffbcc7c40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad391cf20 'a')
--> $$ = nterm item (0xfffff6b4ab00 'a')
-0xaaaad391cf20->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4ab00 }
-0xaaaad391cf20->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4ab00 }
-0xfffff6b4ab00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4ab00 }
+   $1 = token 'a' (0xaaaad5e31f20 'a')
+-> $$ = nterm item (0xfffffbcc7c40 'a')
+0xaaaad5e31f20->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c40 }
+0xaaaad5e31f20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c40 }
+0xfffffbcc7c40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c40 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xfffff6b4aa10->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20 }
-0xfffff6b4aae0->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'a' (0xfffff6b4aae0 'a')
-0xfffff6b4aa00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa00, 0xfffff6b4aae0 }
-Shifting token 'a' (0xfffff6b4aa00 'a')
-0xaaaad391cf40->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aa00 }
-0xfffff6b4aa00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4aa00 }
+0xfffffbcc7b50->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20 }
+0xfffffbcc7c20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'a' (0xfffffbcc7c20 'a')
+0xfffffbcc7b40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b40, 0xfffffbcc7c20 }
+Shifting token 'a' (0xfffffbcc7b40 'a')
+0xaaaad5e31f40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7b40 }
+0xfffffbcc7b40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7b40 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xfffff6b4ab00->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40 }
+0xfffffbcc7c40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad391cf40 'a')
--> $$ = nterm item (0xfffff6b4ab00 'a')
-0xaaaad391cf40->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4ab00 }
-0xaaaad391cf40->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4ab00 }
-0xfffff6b4ab00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4ab00 }
+   $1 = token 'a' (0xaaaad5e31f40 'a')
+-> $$ = nterm item (0xfffffbcc7c40 'a')
+0xaaaad5e31f40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7c40 }
+0xaaaad5e31f40->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c40 }
+0xfffffbcc7c40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7c40 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xfffff6b4aa10->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40 }
-0xfffff6b4aae0->Object::Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4aa10 }
-0xfffff6b4aa10->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4aa10, 0xfffff6b4aae0 }
-Next token is token 'p' (0xfffff6b4aae0 'p'Exception caught: cleaning lookahead and stack
-0xaaaad391cf40->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xaaaad391cf40, 0xfffff6b4aae0 }
-0xaaaad391cf20->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xaaaad391cf20, 0xfffff6b4aae0 }
-0xaaaad391cf00->Object::~Object { 0xaaaad391cee0, 0xaaaad391cf00, 0xfffff6b4aae0 }
-0xaaaad391cee0->Object::~Object { 0xaaaad391cee0, 0xfffff6b4aae0 }
-0xfffff6b4aae0->Object::~Object { 0xfffff6b4aae0 }
+0xfffffbcc7b50->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40 }
+0xfffffbcc7c20->Object::Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7b50 }
+0xfffffbcc7b50->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7b50, 0xfffffbcc7c20 }
+Next token is token 'p' (0xfffffbcc7c20 'p'Exception caught: cleaning lookahead and stack
+0xaaaad5e31f40->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xaaaad5e31f40, 0xfffffbcc7c20 }
+0xaaaad5e31f20->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xaaaad5e31f20, 0xfffffbcc7c20 }
+0xaaaad5e31f00->Object::~Object { 0xaaaad5e31ee0, 0xaaaad5e31f00, 0xfffffbcc7c20 }
+0xaaaad5e31ee0->Object::~Object { 0xaaaad5e31ee0, 0xfffffbcc7c20 }
+0xfffffbcc7c20->Object::~Object { 0xfffffbcc7c20 }
 exception caught: printer
 end { }
 ./c++.at:1362: grep '^exception caught: printer$' stderr
@@ -255927,391 +256078,581 @@
 ./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:858:  $PREPARSER ./input
-stderr:
-./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ======== Testing with C++ standard flags: ''
 ./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh
-./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o x2.cc x2.yy
-./c++.at:1471: $CXX $CPPFLAGS  $CXXFLAGS -Iinclude -c -o x2.o x2.cc 
+./c++.at:1361:  $PREPARSER ./input aaaas
+stderr:
+exception caught: reduction
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stdout:
 ./c++.at:1363:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaal
+stderr:
+exception caught: yylex
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input aaaal
 stderr:
 exception caught: yylex
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input i
+./c++.at:1361:  $PREPARSER ./input i
 stderr:
+stderr:
+exception caught: initial-action
 exception caught: initial-action
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input aaaap
 stderr:
+./c++.at:1361:  $PREPARSER ./input aaaap
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input --debug aaaap
 stderr:
+./c++.at:1361:  $PREPARSER ./input --debug aaaap
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xfffff0ef66d0->Object::Object { }
+0xfffff0ef67a0->Object::Object { 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'a' (0xfffff0ef67a0 'a')
+0xfffff0ef66e0->Object::Object { 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::Object { 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::~Object { 0xfffff0ef6690, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xfffff0ef66e0, 0xfffff0ef67a0 }
+Shifting token 'a' (0xfffff0ef66e0 'a')
+0xaaaab936dee0->Object::Object { 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::Object { 0xaaaab936dee0, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::~Object { 0xaaaab936dee0, 0xfffff0ef6668, 0xfffff0ef66e0 }
+0xfffff0ef66e0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef66e0 }
+Entering state 1
+Stack now 0 1
+0xfffff0ef67c0->Object::Object { 0xaaaab936dee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaab936dee0 'a')
+-> $$ = nterm item (0xfffff0ef67c0 'a')
+0xaaaab936dee0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef67c0 }
+0xaaaab936dee0->Object::Object { 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::Object { 0xaaaab936dee0, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::~Object { 0xaaaab936dee0, 0xfffff0ef6770, 0xfffff0ef67c0 }
+0xfffff0ef67c0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef67c0 }
+Entering state 10
+Stack now 0 10
+Reading a token
+0xfffff0ef66d0->Object::Object { 0xaaaab936dee0 }
+0xfffff0ef67a0->Object::Object { 0xaaaab936dee0, 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'a' (0xfffff0ef67a0 'a')
+0xfffff0ef66e0->Object::Object { 0xaaaab936dee0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::Object { 0xaaaab936dee0, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::~Object { 0xaaaab936dee0, 0xfffff0ef6690, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+Shifting token 'a' (0xfffff0ef66e0 'a')
+0xaaaab936df00->Object::Object { 0xaaaab936dee0, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef6668, 0xfffff0ef66e0 }
+0xfffff0ef66e0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0 }
+Entering state 1
+Stack now 0 10 1
+0xfffff0ef67c0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaab936df00 'a')
+-> $$ = nterm item (0xfffff0ef67c0 'a')
+0xaaaab936df00->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67c0 }
+0xaaaab936df00->Object::Object { 0xaaaab936dee0, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef6770, 0xfffff0ef67c0 }
+0xfffff0ef67c0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67c0 }
+Entering state 10
+Stack now 0 10 10
+Reading a token
+0xfffff0ef66d0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00 }
+0xfffff0ef67a0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'a' (0xfffff0ef67a0 'a')
+0xfffff0ef66e0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef6690, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+Shifting token 'a' (0xfffff0ef66e0 'a')
+0xaaaab936df20->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef6668, 0xfffff0ef66e0 }
+0xfffff0ef66e0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0 }
+Entering state 1
+Stack now 0 10 10 1
+0xfffff0ef67c0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaab936df20 'a')
+-> $$ = nterm item (0xfffff0ef67c0 'a')
+0xaaaab936df20->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67c0 }
+0xaaaab936df20->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef6770, 0xfffff0ef67c0 }
+0xfffff0ef67c0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67c0 }
+Entering state 10
+Stack now 0 10 10 10
+Reading a token
+0xfffff0ef66d0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20 }
+0xfffff0ef67a0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'a' (0xfffff0ef67a0 'a')
+0xfffff0ef66e0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef6690, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+Shifting token 'a' (0xfffff0ef66e0 'a')
+0xaaaab936df40->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef6668, 0xfffff0ef66e0 }
+0xfffff0ef66e0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef66e0 }
+Entering state 1
+Stack now 0 10 10 10 1
+0xfffff0ef67c0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaab936df40 'a')
+-> $$ = nterm item (0xfffff0ef67c0 'a')
+0xaaaab936df40->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef67c0 }
+0xaaaab936df40->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef6770, 0xfffff0ef67c0 }
+0xfffff0ef67c0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef67c0 }
+Entering state 10
+Stack now 0 10 10 10 10
+Reading a token
+0xfffff0ef66d0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40 }
+0xfffff0ef67a0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'p' (0xfffff0ef67a0 'p'Exception caught: cleaning lookahead and stack
+0xaaaab936df40->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef67a0 }
+0xaaaab936df20->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67a0 }
+0xaaaab936df00->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67a0 }
+0xaaaab936dee0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xfffff0ef67a0 }
+exception caught: printer
+end { }
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xfffffd0cbe60->Object::Object { }
-0xfffffd0cbf50->Object::Object { 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'a' (0xfffffd0cbf50 'a')
-0xfffffd0cbe70->Object::Object { 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xfffffd0cbe70, 0xfffffd0cbf50 }
-Shifting token 'a' (0xfffffd0cbe70 'a')
-0xaaaae6836ee0->Object::Object { 0xfffffd0cbe70 }
-0xfffffd0cbe70->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbe70 }
+0xfffff0ef66d0->Object::Object { }
+0xfffff0ef67a0->Object::Object { 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'a' (0xfffff0ef67a0 'a')
+0xfffff0ef66e0->Object::Object { 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::Object { 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::~Object { 0xfffff0ef6690, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xfffff0ef66e0, 0xfffff0ef67a0 }
+Shifting token 'a' (0xfffff0ef66e0 'a')
+0xaaaab936dee0->Object::Object { 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::Object { 0xaaaab936dee0, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::~Object { 0xaaaab936dee0, 0xfffff0ef6668, 0xfffff0ef66e0 }
+0xfffff0ef66e0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef66e0 }
 Entering state 1
 Stack now 0 1
-0xfffffd0cbf70->Object::Object { 0xaaaae6836ee0 }
+0xfffff0ef67c0->Object::Object { 0xaaaab936dee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6836ee0 'a')
--> $$ = nterm item (0xfffffd0cbf70 'a')
-0xaaaae6836ee0->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbf70 }
-0xaaaae6836ee0->Object::Object { 0xfffffd0cbf70 }
-0xfffffd0cbf70->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbf70 }
+   $1 = token 'a' (0xaaaab936dee0 'a')
+-> $$ = nterm item (0xfffff0ef67c0 'a')
+0xaaaab936dee0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef67c0 }
+0xaaaab936dee0->Object::Object { 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::Object { 0xaaaab936dee0, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::~Object { 0xaaaab936dee0, 0xfffff0ef6770, 0xfffff0ef67c0 }
+0xfffff0ef67c0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef67c0 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xfffffd0cbe60->Object::Object { 0xaaaae6836ee0 }
-0xfffffd0cbf50->Object::Object { 0xaaaae6836ee0, 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'a' (0xfffffd0cbf50 'a')
-0xfffffd0cbe70->Object::Object { 0xaaaae6836ee0, 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbe70, 0xfffffd0cbf50 }
-Shifting token 'a' (0xfffffd0cbe70 'a')
-0xaaaae6836f00->Object::Object { 0xaaaae6836ee0, 0xfffffd0cbe70 }
-0xfffffd0cbe70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe70 }
+0xfffff0ef66d0->Object::Object { 0xaaaab936dee0 }
+0xfffff0ef67a0->Object::Object { 0xaaaab936dee0, 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'a' (0xfffff0ef67a0 'a')
+0xfffff0ef66e0->Object::Object { 0xaaaab936dee0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::Object { 0xaaaab936dee0, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::~Object { 0xaaaab936dee0, 0xfffff0ef6690, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+Shifting token 'a' (0xfffff0ef66e0 'a')
+0xaaaab936df00->Object::Object { 0xaaaab936dee0, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef6668, 0xfffff0ef66e0 }
+0xfffff0ef66e0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0 }
 Entering state 1
 Stack now 0 10 1
-0xfffffd0cbf70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00 }
+0xfffff0ef67c0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6836f00 'a')
--> $$ = nterm item (0xfffffd0cbf70 'a')
-0xaaaae6836f00->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf70 }
-0xaaaae6836f00->Object::Object { 0xaaaae6836ee0, 0xfffffd0cbf70 }
-0xfffffd0cbf70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf70 }
+   $1 = token 'a' (0xaaaab936df00 'a')
+-> $$ = nterm item (0xfffff0ef67c0 'a')
+0xaaaab936df00->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67c0 }
+0xaaaab936df00->Object::Object { 0xaaaab936dee0, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef6770, 0xfffff0ef67c0 }
+0xfffff0ef67c0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67c0 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xfffffd0cbe60->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00 }
-0xfffffd0cbf50->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'a' (0xfffffd0cbf50 'a')
-0xfffffd0cbe70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe70, 0xfffffd0cbf50 }
-Shifting token 'a' (0xfffffd0cbe70 'a')
-0xaaaae6836f20->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe70 }
-0xfffffd0cbe70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe70 }
+0xfffff0ef66d0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00 }
+0xfffff0ef67a0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'a' (0xfffff0ef67a0 'a')
+0xfffff0ef66e0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef6690, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+Shifting token 'a' (0xfffff0ef66e0 'a')
+0xaaaab936df20->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef6668, 0xfffff0ef66e0 }
+0xfffff0ef66e0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0 }
 Entering state 1
 Stack now 0 10 10 1
-0xfffffd0cbf70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20 }
+0xfffff0ef67c0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6836f20 'a')
--> $$ = nterm item (0xfffffd0cbf70 'a')
-0xaaaae6836f20->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf70 }
-0xaaaae6836f20->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf70 }
-0xfffffd0cbf70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf70 }
+   $1 = token 'a' (0xaaaab936df20 'a')
+-> $$ = nterm item (0xfffff0ef67c0 'a')
+0xaaaab936df20->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67c0 }
+0xaaaab936df20->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef6770, 0xfffff0ef67c0 }
+0xfffff0ef67c0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67c0 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xfffffd0cbe60->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20 }
-0xfffffd0cbf50->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'a' (0xfffffd0cbf50 'a')
-0xfffffd0cbe70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe70, 0xfffffd0cbf50 }
-Shifting token 'a' (0xfffffd0cbe70 'a')
-0xaaaae6836f40->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe70 }
-0xfffffd0cbe70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbe70 }
+0xfffff0ef66d0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20 }
+0xfffff0ef67a0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'a' (0xfffff0ef67a0 'a')
+0xfffff0ef66e0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef6690->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef6690, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0, 0xfffff0ef67a0 }
+Shifting token 'a' (0xfffff0ef66e0 'a')
+0xaaaab936df40->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef66e0 }
+0xfffff0ef6668->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef6668, 0xfffff0ef66e0 }
+0xfffff0ef66e0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef66e0 }
 Entering state 1
 Stack now 0 10 10 10 1
-0xfffffd0cbf70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40 }
+0xfffff0ef67c0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6836f40 'a')
--> $$ = nterm item (0xfffffd0cbf70 'a')
-0xaaaae6836f40->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbf70 }
-0xaaaae6836f40->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf70 }
-0xfffffd0cbf70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbf70 }
+   $1 = token 'a' (0xaaaab936df40 'a')
+-> $$ = nterm item (0xfffff0ef67c0 'a')
+0xaaaab936df40->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef67c0 }
+0xaaaab936df40->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef67c0 }
+0xfffff0ef6770->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef6770, 0xfffff0ef67c0 }
+0xfffff0ef67c0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef67c0 }
+Entering state 10
+Stack now 0 10 10 10 10
+Reading a token
+0xfffff0ef66d0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40 }
+0xfffff0ef67a0->Object::Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef66d0 }
+0xfffff0ef66d0->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef66d0, 0xfffff0ef67a0 }
+Next token is token 'p' (0xfffff0ef67a0 'p'Exception caught: cleaning lookahead and stack
+0xaaaab936df40->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xaaaab936df40, 0xfffff0ef67a0 }
+0xaaaab936df20->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xaaaab936df20, 0xfffff0ef67a0 }
+0xaaaab936df00->Object::~Object { 0xaaaab936dee0, 0xaaaab936df00, 0xfffff0ef67a0 }
+0xaaaab936dee0->Object::~Object { 0xaaaab936dee0, 0xfffff0ef67a0 }
+0xfffff0ef67a0->Object::~Object { 0xfffff0ef67a0 }
+exception caught: printer
+end { }
+./c++.at:1363: grep '^exception caught: printer$' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xaaaad630ab40->Object::Object { }
+Next token is token 'a' (0xaaaad630ab40 'a')
+Shifting token 'a' (0xaaaad630ab40 'a')
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaad630ab40 'a')
+-> $$ = nterm item (0xaaaad630ab40 'a')
+Entering state 10
+Stack now 0 10
+Reading a token
+0xaaaad630ab90->Object::Object { 0xaaaad630ab40 }
+Next token is token 'a' (0xaaaad630ab90 'a')
+Shifting token 'a' (0xaaaad630ab90 'a')
+Entering state 1
+Stack now 0 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaad630ab90 'a')
+-> $$ = nterm item (0xaaaad630ab90 'a')
+Entering state 10
+Stack now 0 10 10
+Reading a token
+0xaaaad630abe0->Object::Object { 0xaaaad630ab40, 0xaaaad630ab90 }
+Next token is token 'a' (0xaaaad630abe0 'a')
+Shifting token 'a' (0xaaaad630abe0 'a')
+Entering state 1
+Stack now 0 10 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaad630abe0 'a')
+-> $$ = nterm item (0xaaaad630abe0 'a')
+Entering state 10
+Stack now 0 10 10 10
+Reading a token
+0xaaaad630ac30->Object::Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0 }
+Next token is token 'a' (0xaaaad630ac30 'a')
+Shifting token 'a' (0xaaaad630ac30 'a')
+Entering state 1
+Stack now 0 10 10 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaad630ac30 'a')
+-> $$ = nterm item (0xaaaad630ac30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xfffffd0cbe60->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40 }
-0xfffffd0cbf50->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'p' (0xfffffd0cbf50 'p'Exception caught: cleaning lookahead and stack
-0xaaaae6836f40->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbf50 }
-0xaaaae6836f20->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf50 }
-0xaaaae6836f00->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf50 }
-0xaaaae6836ee0->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xfffffd0cbf50 }
+0xaaaad630ac80->Object::Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0, 0xaaaad630ac30 }
+Next token is token 'p' (0xaaaad630ac80 'p'Exception caught: cleaning lookahead and stack
+0xaaaad630ac80->Object::~Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0, 0xaaaad630ac30, 0xaaaad630ac80 }
+0xaaaad630ac30->Object::~Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0, 0xaaaad630ac30 }
+0xaaaad630abe0->Object::~Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0 }
+0xaaaad630ab90->Object::~Object { 0xaaaad630ab40, 0xaaaad630ab90 }
+0xaaaad630ab40->Object::~Object { 0xaaaad630ab40 }
 exception caught: printer
 end { }
+stdout:
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+exception caught: printer
+./c++.at:1363:  $PREPARSER ./input aaaae
+stdout:
+./c++.at:1360:  $PREPARSER ./input aaaas
+stderr:
+exception caught: reduction
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+exception caught: syntax error
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xfffffd0cbe60->Object::Object { }
-0xfffffd0cbf50->Object::Object { 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'a' (0xfffffd0cbf50 'a')
-0xfffffd0cbe70->Object::Object { 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xfffffd0cbe70, 0xfffffd0cbf50 }
-Shifting token 'a' (0xfffffd0cbe70 'a')
-0xaaaae6836ee0->Object::Object { 0xfffffd0cbe70 }
-0xfffffd0cbe70->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbe70 }
+0xaaaad630ab40->Object::Object { }
+Next token is token 'a' (0xaaaad630ab40 'a')
+Shifting token 'a' (0xaaaad630ab40 'a')
 Entering state 1
 Stack now 0 1
-0xfffffd0cbf70->Object::Object { 0xaaaae6836ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6836ee0 'a')
--> $$ = nterm item (0xfffffd0cbf70 'a')
-0xaaaae6836ee0->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbf70 }
-0xaaaae6836ee0->Object::Object { 0xfffffd0cbf70 }
-0xfffffd0cbf70->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbf70 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaad630ab40 'a')
+-> $$ = nterm item (0xaaaad630ab40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xfffffd0cbe60->Object::Object { 0xaaaae6836ee0 }
-0xfffffd0cbf50->Object::Object { 0xaaaae6836ee0, 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'a' (0xfffffd0cbf50 'a')
-0xfffffd0cbe70->Object::Object { 0xaaaae6836ee0, 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbe70, 0xfffffd0cbf50 }
-Shifting token 'a' (0xfffffd0cbe70 'a')
-0xaaaae6836f00->Object::Object { 0xaaaae6836ee0, 0xfffffd0cbe70 }
-0xfffffd0cbe70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe70 }
+0xaaaad630ab90->Object::Object { 0xaaaad630ab40 }
+Next token is token 'a' (0xaaaad630ab90 'a')
+Shifting token 'a' (0xaaaad630ab90 'a')
 Entering state 1
 Stack now 0 10 1
-0xfffffd0cbf70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6836f00 'a')
--> $$ = nterm item (0xfffffd0cbf70 'a')
-0xaaaae6836f00->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf70 }
-0xaaaae6836f00->Object::Object { 0xaaaae6836ee0, 0xfffffd0cbf70 }
-0xfffffd0cbf70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf70 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaad630ab90 'a')
+-> $$ = nterm item (0xaaaad630ab90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xfffffd0cbe60->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00 }
-0xfffffd0cbf50->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'a' (0xfffffd0cbf50 'a')
-0xfffffd0cbe70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe70, 0xfffffd0cbf50 }
-Shifting token 'a' (0xfffffd0cbe70 'a')
-0xaaaae6836f20->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbe70 }
-0xfffffd0cbe70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe70 }
+0xaaaad630abe0->Object::Object { 0xaaaad630ab40, 0xaaaad630ab90 }
+Next token is token 'a' (0xaaaad630abe0 'a')
+Shifting token 'a' (0xaaaad630abe0 'a')
 Entering state 1
 Stack now 0 10 10 1
-0xfffffd0cbf70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6836f20 'a')
--> $$ = nterm item (0xfffffd0cbf70 'a')
-0xaaaae6836f20->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf70 }
-0xaaaae6836f20->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf70 }
-0xfffffd0cbf70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf70 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaad630abe0 'a')
+-> $$ = nterm item (0xaaaad630abe0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xfffffd0cbe60->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20 }
-0xfffffd0cbf50->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'a' (0xfffffd0cbf50 'a')
-0xfffffd0cbe70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe70, 0xfffffd0cbf50 }
-Shifting token 'a' (0xfffffd0cbe70 'a')
-0xaaaae6836f40->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbe70 }
-0xfffffd0cbe70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbe70 }
+0xaaaad630ac30->Object::Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0 }
+Next token is token 'a' (0xaaaad630ac30 'a')
+Shifting token 'a' (0xaaaad630ac30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
-0xfffffd0cbf70->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6836f40 'a')
--> $$ = nterm item (0xfffffd0cbf70 'a')
-0xaaaae6836f40->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbf70 }
-0xaaaae6836f40->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf70 }
-0xfffffd0cbf70->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbf70 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaad630ac30 'a')
+-> $$ = nterm item (0xaaaad630ac30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xfffffd0cbe60->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40 }
-0xfffffd0cbf50->Object::Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbe60 }
-0xfffffd0cbe60->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbe60, 0xfffffd0cbf50 }
-Next token is token 'p' (0xfffffd0cbf50 'p'Exception caught: cleaning lookahead and stack
-0xaaaae6836f40->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xaaaae6836f40, 0xfffffd0cbf50 }
-0xaaaae6836f20->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xaaaae6836f20, 0xfffffd0cbf50 }
-0xaaaae6836f00->Object::~Object { 0xaaaae6836ee0, 0xaaaae6836f00, 0xfffffd0cbf50 }
-0xaaaae6836ee0->Object::~Object { 0xaaaae6836ee0, 0xfffffd0cbf50 }
-0xfffffd0cbf50->Object::~Object { 0xfffffd0cbf50 }
+0xaaaad630ac80->Object::Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0, 0xaaaad630ac30 }
+Next token is token 'p' (0xaaaad630ac80 'p'Exception caught: cleaning lookahead and stack
+0xaaaad630ac80->Object::~Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0, 0xaaaad630ac30, 0xaaaad630ac80 }
+0xaaaad630ac30->Object::~Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0, 0xaaaad630ac30 }
+0xaaaad630abe0->Object::~Object { 0xaaaad630ab40, 0xaaaad630ab90, 0xaaaad630abe0 }
+0xaaaad630ab90->Object::~Object { 0xaaaad630ab40, 0xaaaad630ab90 }
+0xaaaad630ab40->Object::~Object { 0xaaaad630ab40 }
 exception caught: printer
 end { }
-./c++.at:1363: grep '^exception caught: printer$' stderr
+./c++.at:1361: grep '^exception caught: printer$' stderr
+./c++.at:1360:  $PREPARSER ./input aaaal
 stdout:
 exception caught: printer
-./c++.at:1363:  $PREPARSER ./input aaaae
-stderr:
-exception caught: syntax error
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaae
 ./c++.at:1363:  $PREPARSER ./input aaaaE
 stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaT
+exception caught: yylex
+./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaR
 stderr:
+exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1360:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1360:  $PREPARSER ./input aaaal
-stderr:
-exception caught: yylex
-./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+exception caught: syntax error
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1360:  $PREPARSER ./input i
 stderr:
+./c++.at:1361:  $PREPARSER ./input aaaaE
 exception caught: initial-action
+./c++.at:1363:  $PREPARSER ./input aaaaT
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stderr:
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1360:  $PREPARSER ./input aaaap
+./c++.at:1361:  $PREPARSER ./input aaaaT
+stderr:
 stderr:
+./c++.at:1363:  $PREPARSER ./input aaaaR
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaaR
 ./c++.at:1360:  $PREPARSER ./input --debug aaaap
 stderr:
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab02cf6b40->Object::Object { }
-Next token is token 'a' (0xaaab02cf6b40 'a')
-Shifting token 'a' (0xaaab02cf6b40 'a')
+0xaaaaee2b3b40->Object::Object { }
+Next token is token 'a' (0xaaaaee2b3b40 'a')
+Shifting token 'a' (0xaaaaee2b3b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab02cf6b40 'a')
--> $$ = nterm item (0xaaab02cf6b40 'a')
+   $1 = token 'a' (0xaaaaee2b3b40 'a')
+-> $$ = nterm item (0xaaaaee2b3b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaab02cf6b90->Object::Object { 0xaaab02cf6b40 }
-Next token is token 'a' (0xaaab02cf6b90 'a')
-Shifting token 'a' (0xaaab02cf6b90 'a')
+0xaaaaee2b3b90->Object::Object { 0xaaaaee2b3b40 }
+Next token is token 'a' (0xaaaaee2b3b90 'a')
+Shifting token 'a' (0xaaaaee2b3b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab02cf6b90 'a')
--> $$ = nterm item (0xaaab02cf6b90 'a')
+   $1 = token 'a' (0xaaaaee2b3b90 'a')
+-> $$ = nterm item (0xaaaaee2b3b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaab02cf6be0->Object::Object { 0xaaab02cf6b40, 0xaaab02cf6b90 }
-Next token is token 'a' (0xaaab02cf6be0 'a')
-Shifting token 'a' (0xaaab02cf6be0 'a')
+0xaaaaee2b3be0->Object::Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90 }
+Next token is token 'a' (0xaaaaee2b3be0 'a')
+Shifting token 'a' (0xaaaaee2b3be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab02cf6be0 'a')
--> $$ = nterm item (0xaaab02cf6be0 'a')
+   $1 = token 'a' (0xaaaaee2b3be0 'a')
+-> $$ = nterm item (0xaaaaee2b3be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaab02cf6c30->Object::Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0 }
-Next token is token 'a' (0xaaab02cf6c30 'a')
-Shifting token 'a' (0xaaab02cf6c30 'a')
+0xaaaaee2b3c30->Object::Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0 }
+Next token is token 'a' (0xaaaaee2b3c30 'a')
+Shifting token 'a' (0xaaaaee2b3c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab02cf6c30 'a')
--> $$ = nterm item (0xaaab02cf6c30 'a')
+   $1 = token 'a' (0xaaaaee2b3c30 'a')
+-> $$ = nterm item (0xaaaaee2b3c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaab02cf6c80->Object::Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0, 0xaaab02cf6c30 }
-Next token is token 'p' (0xaaab02cf6c80 'p'Exception caught: cleaning lookahead and stack
-0xaaab02cf6c80->Object::~Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0, 0xaaab02cf6c30, 0xaaab02cf6c80 }
-0xaaab02cf6c30->Object::~Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0, 0xaaab02cf6c30 }
-0xaaab02cf6be0->Object::~Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0 }
-0xaaab02cf6b90->Object::~Object { 0xaaab02cf6b40, 0xaaab02cf6b90 }
-0xaaab02cf6b40->Object::~Object { 0xaaab02cf6b40 }
+0xaaaaee2b3c80->Object::Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0, 0xaaaaee2b3c30 }
+Next token is token 'p' (0xaaaaee2b3c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaaee2b3c80->Object::~Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0, 0xaaaaee2b3c30, 0xaaaaee2b3c80 }
+0xaaaaee2b3c30->Object::~Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0, 0xaaaaee2b3c30 }
+0xaaaaee2b3be0->Object::~Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0 }
+0xaaaaee2b3b90->Object::~Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90 }
+0xaaaaee2b3b40->Object::~Object { 0xaaaaee2b3b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
 stderr:
+./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaab02cf6b40->Object::Object { }
-Next token is token 'a' (0xaaab02cf6b40 'a')
-Shifting token 'a' (0xaaab02cf6b40 'a')
+0xaaaaee2b3b40->Object::Object { }
+Next token is token 'a' (0xaaaaee2b3b40 'a')
+Shifting token 'a' (0xaaaaee2b3b40 'a')
 Entering state 2
 Stack now 0 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab02cf6b40 'a')
--> $$ = nterm item (0xaaab02cf6b40 'a')
+   $1 = token 'a' (0xaaaaee2b3b40 'a')
+-> $$ = nterm item (0xaaaaee2b3b40 'a')
 Entering state 11
 Stack now 0 11
 Reading a token
-0xaaab02cf6b90->Object::Object { 0xaaab02cf6b40 }
-Next token is token 'a' (0xaaab02cf6b90 'a')
-Shifting token 'a' (0xaaab02cf6b90 'a')
+0xaaaaee2b3b90->Object::Object { 0xaaaaee2b3b40 }
+Next token is token 'a' (0xaaaaee2b3b90 'a')
+Shifting token 'a' (0xaaaaee2b3b90 'a')
 Entering state 2
 Stack now 0 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab02cf6b90 'a')
--> $$ = nterm item (0xaaab02cf6b90 'a')
+   $1 = token 'a' (0xaaaaee2b3b90 'a')
+-> $$ = nterm item (0xaaaaee2b3b90 'a')
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xaaab02cf6be0->Object::Object { 0xaaab02cf6b40, 0xaaab02cf6b90 }
-Next token is token 'a' (0xaaab02cf6be0 'a')
-Shifting token 'a' (0xaaab02cf6be0 'a')
+0xaaaaee2b3be0->Object::Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90 }
+Next token is token 'a' (0xaaaaee2b3be0 'a')
+Shifting token 'a' (0xaaaaee2b3be0 'a')
 Entering state 2
 Stack now 0 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab02cf6be0 'a')
--> $$ = nterm item (0xaaab02cf6be0 'a')
+   $1 = token 'a' (0xaaaaee2b3be0 'a')
+-> $$ = nterm item (0xaaaaee2b3be0 'a')
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xaaab02cf6c30->Object::Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0 }
-Next token is token 'a' (0xaaab02cf6c30 'a')
-Shifting token 'a' (0xaaab02cf6c30 'a')
+0xaaaaee2b3c30->Object::Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0 }
+Next token is token 'a' (0xaaaaee2b3c30 'a')
+Shifting token 'a' (0xaaaaee2b3c30 'a')
 Entering state 2
 Stack now 0 11 11 11 2
 Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab02cf6c30 'a')
--> $$ = nterm item (0xaaab02cf6c30 'a')
+   $1 = token 'a' (0xaaaaee2b3c30 'a')
+-> $$ = nterm item (0xaaaaee2b3c30 'a')
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xaaab02cf6c80->Object::Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0, 0xaaab02cf6c30 }
-Next token is token 'p' (0xaaab02cf6c80 'p'Exception caught: cleaning lookahead and stack
-0xaaab02cf6c80->Object::~Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0, 0xaaab02cf6c30, 0xaaab02cf6c80 }
-0xaaab02cf6c30->Object::~Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0, 0xaaab02cf6c30 }
-0xaaab02cf6be0->Object::~Object { 0xaaab02cf6b40, 0xaaab02cf6b90, 0xaaab02cf6be0 }
-0xaaab02cf6b90->Object::~Object { 0xaaab02cf6b40, 0xaaab02cf6b90 }
-0xaaab02cf6b40->Object::~Object { 0xaaab02cf6b40 }
+0xaaaaee2b3c80->Object::Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0, 0xaaaaee2b3c30 }
+Next token is token 'p' (0xaaaaee2b3c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaaee2b3c80->Object::~Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0, 0xaaaaee2b3c30, 0xaaaaee2b3c80 }
+0xaaaaee2b3c30->Object::~Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0, 0xaaaaee2b3c30 }
+0xaaaaee2b3be0->Object::~Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90, 0xaaaaee2b3be0 }
+0xaaaaee2b3b90->Object::~Object { 0xaaaaee2b3b40, 0xaaaaee2b3b90 }
+0xaaaaee2b3b40->Object::~Object { 0xaaaaee2b3b40 }
 exception caught: printer
 end { }
 ./c++.at:1360: grep '^exception caught: printer$' stderr
@@ -256342,202 +256683,466 @@
 ./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o test.cc test.y
 stderr:
 stdout:
+./c++.at:1502:  $PREPARSER ./parser
+stderr:
+./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+690. c++.at:1422:  ok
+
+692. java.at:25: testing Java invalid directives ...
+./java.at:35: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret YYParser.y
+./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret YYParser.y
+stderr:
+stdout:
 ./c++.at:858:  $PREPARSER ./input
 stderr:
 ./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+692. java.at:25:  ok
+
+693. java.at:186: testing Java parser class and package names ...
+./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+./java.at:188: grep '[mb]4_' YYParser.y
+stdout:
+693. java.at:186:  skipped (java.at:188)
+
+694. java.at:217: testing Java parser class modifiers ...
+./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+./java.at:219: grep '[mb]4_' YYParser.y
+stdout:
+694. java.at:217:  skipped (java.at:219)
+
+695. java.at:287: testing Java parser class extends and implements ...
+./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+./java.at:289: grep '[mb]4_' YYParser.y
+stdout:
+695. java.at:287:  skipped (java.at:289)
+
+696. java.at:307: testing Java %parse-param and %lex-param ...
+./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
 stderr:
 stdout:
 ======== Testing with C++ standard flags: ''
 ./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./java.at:309: grep '[mb]4_' YYParser.y
+stdout:
+696. java.at:307:  skipped (java.at:309)
+
+697. java.at:381: testing Java throws specifications ...
+./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+./java.at:441: grep '[mb]4_' YYParser.y
+stdout:
+697. java.at:381:  skipped (java.at:441)
+
 stderr:
 stdout:
-./c++.at:1501: $CXX $CPPFLAGS  $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS
+./c++.at:1066:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1066:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1066:  $PREPARSER ./input < in
+698. java.at:470: testing Java constructor init and init_throws ...
+stderr:
+./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+error: invalid character
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+./java.at:475: grep '[mb]4_' YYParser.y
+stdout:
+698. java.at:470:  skipped (java.at:475)
+
+699. java.at:497: testing Java value, position, and location types ...
+./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+stderr:
+stdout:
+./c++.at:1066: ./check
+./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
+./java.at:499: grep '[mb]4_' YYParser.y
+stdout:
+699. java.at:497:  skipped (java.at:499)
+
+700. java.at:528: testing Java syntax error handling without error token ...
+./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret YYParser.y
+700. java.at:528:  skipped (java.at:580)
+
+701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ...
+./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y
+./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java
+./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java
+./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y
+./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java
+./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java
+./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y
+./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java
+./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java
+701. javapush.at:172:  skipped (javapush.at:207)
+
+702. javapush.at:217: testing Trivial Push Parser with %initial-action ...
+./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y
+./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java
+702. javapush.at:217:  skipped (javapush.at:230)
+
 stderr:
 stdout:
 ./c++.at:858:  $PREPARSER ./input
+703. d.at:103: testing D parser class extends and implements ...
+./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
 stderr:
 ./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./d.at:106: grep '[mb]4_' YYParser.y
+stdout:
+703. d.at:103:  skipped (d.at:106)
+
 stderr:
 stdout:
-./c++.at:1361:  $PREPARSER ./input aaaas
+./c++.at:1555:  $PREPARSER ./test
+stderr:
+./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+704. d.at:138: testing D parser class api.token.raw true by default ...
+./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+./d.at:141: grep '[mb]4_' YYParser.y
+stdout:
+704. d.at:138:  skipped (d.at:141)
+
+stderr:
+stdout:
+./c++.at:1555: ./check
+-std=c++98 not supported
+======== Testing with C++ standard flags: ''
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+705. cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ...
+./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+stderr:
+types.y:77.8-37: warning: unset value: $$ [-Wother]
+types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
+types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
+stderr:
+stdout:
+./c++.at:1555: ./check
+-std=c++03 not supported
+======== Testing with C++ standard flags: ''
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./c++.at:1555: ./check
+-std=c++11 not supported
+======== Testing with C++ standard flags: ''
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+stderr:
+stdout:
+./c++.at:1363:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaal
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaal
 stderr:
 exception caught: yylex
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input i
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input i
 stderr:
 exception caught: initial-action
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaap
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaap
 stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 stdout:
-./c++.at:1502:  $PREPARSER ./parser
-./c++.at:1361:  $PREPARSER ./input --debug aaaap
-stderr:
+./c++.at:1363:  $PREPARSER ./input --debug aaaap
+./c++.at:1555: ./check
+./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o test.cc test.y
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaad9c0eb40->Object::Object { }
-Next token is token 'a' (0xaaaad9c0eb40 'a')
-Shifting token 'a' (0xaaaad9c0eb40 'a')
+0xffffc2b5ce80->Object::Object { }
+0xffffc2b5cf50->Object::Object { 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'a' (0xffffc2b5cf50 'a')
+0xffffc2b5ce90->Object::Object { 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::Object { 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::~Object { 0xffffc2b5ce40, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xffffc2b5ce90, 0xffffc2b5cf50 }
+Shifting token 'a' (0xffffc2b5ce90 'a')
+0xaaaaeb8efee0->Object::Object { 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce18, 0xffffc2b5ce90 }
+0xffffc2b5ce90->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce90 }
 Entering state 1
 Stack now 0 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad9c0eb40 'a')
--> $$ = nterm item (0xaaaad9c0eb40 'a')
+0xffffc2b5cf70->Object::Object { 0xaaaaeb8efee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaeb8efee0 'a')
+-> $$ = nterm item (0xffffc2b5cf70 'a')
+0xaaaaeb8efee0->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5cf70 }
+0xaaaaeb8efee0->Object::Object { 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5cf20, 0xffffc2b5cf70 }
+0xffffc2b5cf70->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5cf70 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaaad9c0eb90->Object::Object { 0xaaaad9c0eb40 }
-Next token is token 'a' (0xaaaad9c0eb90 'a')
-Shifting token 'a' (0xaaaad9c0eb90 'a')
+0xffffc2b5ce80->Object::Object { 0xaaaaeb8efee0 }
+0xffffc2b5cf50->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'a' (0xffffc2b5cf50 'a')
+0xffffc2b5ce90->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce40, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+Shifting token 'a' (0xffffc2b5ce90 'a')
+0xaaaaeb8eff00->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce18, 0xffffc2b5ce90 }
+0xffffc2b5ce90->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90 }
 Entering state 1
 Stack now 0 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad9c0eb90 'a')
--> $$ = nterm item (0xaaaad9c0eb90 'a')
+0xffffc2b5cf70->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaeb8eff00 'a')
+-> $$ = nterm item (0xffffc2b5cf70 'a')
+0xaaaaeb8eff00->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf70 }
+0xaaaaeb8eff00->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf20, 0xffffc2b5cf70 }
+0xffffc2b5cf70->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf70 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaaad9c0ebe0->Object::Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90 }
-Next token is token 'a' (0xaaaad9c0ebe0 'a')
-Shifting token 'a' (0xaaaad9c0ebe0 'a')
+0xffffc2b5ce80->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00 }
+0xffffc2b5cf50->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'a' (0xffffc2b5cf50 'a')
+0xffffc2b5ce90->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce40, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+Shifting token 'a' (0xffffc2b5ce90 'a')
+0xaaaaeb8eff20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce18, 0xffffc2b5ce90 }
+0xffffc2b5ce90->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90 }
 Entering state 1
 Stack now 0 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad9c0ebe0 'a')
--> $$ = nterm item (0xaaaad9c0ebe0 'a')
+0xffffc2b5cf70->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaeb8eff20 'a')
+-> $$ = nterm item (0xffffc2b5cf70 'a')
+0xaaaaeb8eff20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf70 }
+0xaaaaeb8eff20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf20, 0xffffc2b5cf70 }
+0xffffc2b5cf70->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf70 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaaad9c0ec30->Object::Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0 }
-Next token is token 'a' (0xaaaad9c0ec30 'a')
-Shifting token 'a' (0xaaaad9c0ec30 'a')
+0xffffc2b5ce80->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20 }
+0xffffc2b5cf50->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'a' (0xffffc2b5cf50 'a')
+0xffffc2b5ce90->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce40, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+Shifting token 'a' (0xffffc2b5ce90 'a')
+0xaaaaeb8eff40->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce18, 0xffffc2b5ce90 }
+0xffffc2b5ce90->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce90 }
 Entering state 1
 Stack now 0 10 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad9c0ec30 'a')
--> $$ = nterm item (0xaaaad9c0ec30 'a')
+0xffffc2b5cf70->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaeb8eff40 'a')
+-> $$ = nterm item (0xffffc2b5cf70 'a')
+0xaaaaeb8eff40->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf70 }
+0xaaaaeb8eff40->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf20, 0xffffc2b5cf70 }
+0xffffc2b5cf70->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf70 }
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaaad9c0ec80->Object::Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0, 0xaaaad9c0ec30 }
-Next token is token 'p' (0xaaaad9c0ec80 'p'Exception caught: cleaning lookahead and stack
-0xaaaad9c0ec80->Object::~Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0, 0xaaaad9c0ec30, 0xaaaad9c0ec80 }
-0xaaaad9c0ec30->Object::~Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0, 0xaaaad9c0ec30 }
-0xaaaad9c0ebe0->Object::~Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0 }
-0xaaaad9c0eb90->Object::~Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90 }
-0xaaaad9c0eb40->Object::~Object { 0xaaaad9c0eb40 }
+0xffffc2b5ce80->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40 }
+0xffffc2b5cf50->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'p' (0xffffc2b5cf50 'p'Exception caught: cleaning lookahead and stack
+0xaaaaeb8eff40->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf50 }
+0xaaaaeb8eff20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf50 }
+0xaaaaeb8eff00->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf50 }
+0xaaaaeb8efee0->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xffffc2b5cf50 }
 exception caught: printer
 end { }
-./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-690. c++.at:1422:  ok
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xaaaad9c0eb40->Object::Object { }
-Next token is token 'a' (0xaaaad9c0eb40 'a')
-Shifting token 'a' (0xaaaad9c0eb40 'a')
+0xffffc2b5ce80->Object::Object { }
+0xffffc2b5cf50->Object::Object { 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'a' (0xffffc2b5cf50 'a')
+0xffffc2b5ce90->Object::Object { 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::Object { 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::~Object { 0xffffc2b5ce40, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xffffc2b5ce90, 0xffffc2b5cf50 }
+Shifting token 'a' (0xffffc2b5ce90 'a')
+0xaaaaeb8efee0->Object::Object { 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce18, 0xffffc2b5ce90 }
+0xffffc2b5ce90->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce90 }
 Entering state 1
 Stack now 0 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad9c0eb40 'a')
--> $$ = nterm item (0xaaaad9c0eb40 'a')
+0xffffc2b5cf70->Object::Object { 0xaaaaeb8efee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaeb8efee0 'a')
+-> $$ = nterm item (0xffffc2b5cf70 'a')
+0xaaaaeb8efee0->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5cf70 }
+0xaaaaeb8efee0->Object::Object { 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5cf20, 0xffffc2b5cf70 }
+0xffffc2b5cf70->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5cf70 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xaaaad9c0eb90->Object::Object { 0xaaaad9c0eb40 }
-Next token is token 'a' (0xaaaad9c0eb90 'a')
-Shifting token 'a' (0xaaaad9c0eb90 'a')
+0xffffc2b5ce80->Object::Object { 0xaaaaeb8efee0 }
+0xffffc2b5cf50->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'a' (0xffffc2b5cf50 'a')
+0xffffc2b5ce90->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce40, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+Shifting token 'a' (0xffffc2b5ce90 'a')
+0xaaaaeb8eff00->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce18, 0xffffc2b5ce90 }
+0xffffc2b5ce90->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90 }
 Entering state 1
 Stack now 0 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad9c0eb90 'a')
--> $$ = nterm item (0xaaaad9c0eb90 'a')
+0xffffc2b5cf70->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaeb8eff00 'a')
+-> $$ = nterm item (0xffffc2b5cf70 'a')
+0xaaaaeb8eff00->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf70 }
+0xaaaaeb8eff00->Object::Object { 0xaaaaeb8efee0, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf20, 0xffffc2b5cf70 }
+0xffffc2b5cf70->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf70 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xaaaad9c0ebe0->Object::Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90 }
-Next token is token 'a' (0xaaaad9c0ebe0 'a')
-Shifting token 'a' (0xaaaad9c0ebe0 'a')
+0xffffc2b5ce80->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00 }
+0xffffc2b5cf50->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'a' (0xffffc2b5cf50 'a')
+0xffffc2b5ce90->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce40, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+Shifting token 'a' (0xffffc2b5ce90 'a')
+0xaaaaeb8eff20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce18, 0xffffc2b5ce90 }
+0xffffc2b5ce90->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90 }
 Entering state 1
 Stack now 0 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad9c0ebe0 'a')
--> $$ = nterm item (0xaaaad9c0ebe0 'a')
+0xffffc2b5cf70->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaeb8eff20 'a')
+-> $$ = nterm item (0xffffc2b5cf70 'a')
+0xaaaaeb8eff20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf70 }
+0xaaaaeb8eff20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf20, 0xffffc2b5cf70 }
+0xffffc2b5cf70->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf70 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xaaaad9c0ec30->Object::Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0 }
-Next token is token 'a' (0xaaaad9c0ec30 'a')
-Shifting token 'a' (0xaaaad9c0ec30 'a')
+0xffffc2b5ce80->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20 }
+0xffffc2b5cf50->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'a' (0xffffc2b5cf50 'a')
+0xffffc2b5ce90->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5ce40->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce40, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90, 0xffffc2b5cf50 }
+Shifting token 'a' (0xffffc2b5ce90 'a')
+0xaaaaeb8eff40->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce90 }
+0xffffc2b5ce18->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce18, 0xffffc2b5ce90 }
+0xffffc2b5ce90->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce90 }
 Entering state 1
 Stack now 0 10 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaaad9c0ec30 'a')
--> $$ = nterm item (0xaaaad9c0ec30 'a')
+0xffffc2b5cf70->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaeb8eff40 'a')
+-> $$ = nterm item (0xffffc2b5cf70 'a')
+0xaaaaeb8eff40->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf70 }
+0xaaaaeb8eff40->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf70 }
+0xffffc2b5cf20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf20, 0xffffc2b5cf70 }
+0xffffc2b5cf70->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf70 }
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xaaaad9c0ec80->Object::Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0, 0xaaaad9c0ec30 }
-Next token is token 'p' (0xaaaad9c0ec80 'p'Exception caught: cleaning lookahead and stack
-0xaaaad9c0ec80->Object::~Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0, 0xaaaad9c0ec30, 0xaaaad9c0ec80 }
-0xaaaad9c0ec30->Object::~Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0, 0xaaaad9c0ec30 }
-0xaaaad9c0ebe0->Object::~Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90, 0xaaaad9c0ebe0 }
-0xaaaad9c0eb90->Object::~Object { 0xaaaad9c0eb40, 0xaaaad9c0eb90 }
-0xaaaad9c0eb40->Object::~Object { 0xaaaad9c0eb40 }
+0xffffc2b5ce80->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40 }
+0xffffc2b5cf50->Object::Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce80 }
+0xffffc2b5ce80->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5ce80, 0xffffc2b5cf50 }
+Next token is token 'p' (0xffffc2b5cf50 'p'Exception caught: cleaning lookahead and stack
+0xaaaaeb8eff40->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xaaaaeb8eff40, 0xffffc2b5cf50 }
+0xaaaaeb8eff20->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xaaaaeb8eff20, 0xffffc2b5cf50 }
+0xaaaaeb8eff00->Object::~Object { 0xaaaaeb8efee0, 0xaaaaeb8eff00, 0xffffc2b5cf50 }
+0xaaaaeb8efee0->Object::~Object { 0xaaaaeb8efee0, 0xffffc2b5cf50 }
+0xffffc2b5cf50->Object::~Object { 0xffffc2b5cf50 }
 exception caught: printer
 end { }
-./c++.at:1361: grep '^exception caught: printer$' stderr
+./c++.at:1363: grep '^exception caught: printer$' stderr
 stdout:
 exception caught: printer
-./c++.at:1361:  $PREPARSER ./input aaaae
+./c++.at:1363:  $PREPARSER ./input aaaae
 stderr:
 exception caught: syntax error
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-
-./c++.at:1361:  $PREPARSER ./input aaaaE
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaE
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaaT
-stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaaR
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaT
 stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-692. java.at:25: testing Java invalid directives ...
-./java.at:35: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret YYParser.y
-./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -fcaret YYParser.y
-692. java.at:25:  ok
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaR
 stderr:
-stdout:
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-
 ./c++.at:1362:  $PREPARSER ./input aaaas
 stderr:
+stderr:
+stdout:
 exception caught: reduction
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./c++.at:1362:  $PREPARSER ./input aaaal
 stderr:
 exception caught: yylex
@@ -256555,99 +257160,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd0d1e270->Object::Object { }
-0xffffd0d1e340->Object::Object { 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'a' (0xffffd0d1e340 'a')
-0xffffd0d1e260->Object::Object { 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xffffd0d1e260, 0xffffd0d1e340 }
-Shifting token 'a' (0xffffd0d1e260 'a')
-0xaaaaff7f8ee0->Object::Object { 0xffffd0d1e260 }
-0xffffd0d1e260->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e260 }
+0xffffde0f4aa0->Object::Object { }
+0xffffde0f4b70->Object::Object { 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'a' (0xffffde0f4b70 'a')
+0xffffde0f4a90->Object::Object { 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xffffde0f4a90, 0xffffde0f4b70 }
+Shifting token 'a' (0xffffde0f4a90 'a')
+0xaaaadb0f5ee0->Object::Object { 0xffffde0f4a90 }
+0xffffde0f4a90->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4a90 }
 Entering state 2
 Stack now 0 2
-0xffffd0d1e360->Object::Object { 0xaaaaff7f8ee0 }
+0xffffde0f4b90->Object::Object { 0xaaaadb0f5ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaff7f8ee0 'a')
--> $$ = nterm item (0xffffd0d1e360 'a')
-0xaaaaff7f8ee0->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e360 }
-0xaaaaff7f8ee0->Object::Object { 0xffffd0d1e360 }
-0xffffd0d1e360->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e360 }
+   $1 = token 'a' (0xaaaadb0f5ee0 'a')
+-> $$ = nterm item (0xffffde0f4b90 'a')
+0xaaaadb0f5ee0->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4b90 }
+0xaaaadb0f5ee0->Object::Object { 0xffffde0f4b90 }
+0xffffde0f4b90->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4b90 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xffffd0d1e270->Object::Object { 0xaaaaff7f8ee0 }
-0xffffd0d1e340->Object::Object { 0xaaaaff7f8ee0, 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'a' (0xffffd0d1e340 'a')
-0xffffd0d1e260->Object::Object { 0xaaaaff7f8ee0, 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e260, 0xffffd0d1e340 }
-Shifting token 'a' (0xffffd0d1e260 'a')
-0xaaaaff7f8f00->Object::Object { 0xaaaaff7f8ee0, 0xffffd0d1e260 }
-0xffffd0d1e260->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e260 }
+0xffffde0f4aa0->Object::Object { 0xaaaadb0f5ee0 }
+0xffffde0f4b70->Object::Object { 0xaaaadb0f5ee0, 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'a' (0xffffde0f4b70 'a')
+0xffffde0f4a90->Object::Object { 0xaaaadb0f5ee0, 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4a90, 0xffffde0f4b70 }
+Shifting token 'a' (0xffffde0f4a90 'a')
+0xaaaadb0f5f00->Object::Object { 0xaaaadb0f5ee0, 0xffffde0f4a90 }
+0xffffde0f4a90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4a90 }
 Entering state 2
 Stack now 0 11 2
-0xffffd0d1e360->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00 }
+0xffffde0f4b90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaff7f8f00 'a')
--> $$ = nterm item (0xffffd0d1e360 'a')
-0xaaaaff7f8f00->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e360 }
-0xaaaaff7f8f00->Object::Object { 0xaaaaff7f8ee0, 0xffffd0d1e360 }
-0xffffd0d1e360->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e360 }
+   $1 = token 'a' (0xaaaadb0f5f00 'a')
+-> $$ = nterm item (0xffffde0f4b90 'a')
+0xaaaadb0f5f00->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b90 }
+0xaaaadb0f5f00->Object::Object { 0xaaaadb0f5ee0, 0xffffde0f4b90 }
+0xffffde0f4b90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b90 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xffffd0d1e270->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00 }
-0xffffd0d1e340->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'a' (0xffffd0d1e340 'a')
-0xffffd0d1e260->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e260, 0xffffd0d1e340 }
-Shifting token 'a' (0xffffd0d1e260 'a')
-0xaaaaff7f8f20->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e260 }
-0xffffd0d1e260->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e260 }
+0xffffde0f4aa0->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00 }
+0xffffde0f4b70->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'a' (0xffffde0f4b70 'a')
+0xffffde0f4a90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4a90, 0xffffde0f4b70 }
+Shifting token 'a' (0xffffde0f4a90 'a')
+0xaaaadb0f5f20->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4a90 }
+0xffffde0f4a90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4a90 }
 Entering state 2
 Stack now 0 11 11 2
-0xffffd0d1e360->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20 }
+0xffffde0f4b90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaff7f8f20 'a')
--> $$ = nterm item (0xffffd0d1e360 'a')
-0xaaaaff7f8f20->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e360 }
-0xaaaaff7f8f20->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e360 }
-0xffffd0d1e360->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e360 }
+   $1 = token 'a' (0xaaaadb0f5f20 'a')
+-> $$ = nterm item (0xffffde0f4b90 'a')
+0xaaaadb0f5f20->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b90 }
+0xaaaadb0f5f20->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b90 }
+0xffffde0f4b90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b90 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xffffd0d1e270->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20 }
-0xffffd0d1e340->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'a' (0xffffd0d1e340 'a')
-0xffffd0d1e260->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e260, 0xffffd0d1e340 }
-Shifting token 'a' (0xffffd0d1e260 'a')
-0xaaaaff7f8f40->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e260 }
-0xffffd0d1e260->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e260 }
+0xffffde0f4aa0->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20 }
+0xffffde0f4b70->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'a' (0xffffde0f4b70 'a')
+0xffffde0f4a90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4a90, 0xffffde0f4b70 }
+Shifting token 'a' (0xffffde0f4a90 'a')
+0xaaaadb0f5f40->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4a90 }
+0xffffde0f4a90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4a90 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xffffd0d1e360->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40 }
+0xffffde0f4b90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaff7f8f40 'a')
--> $$ = nterm item (0xffffd0d1e360 'a')
-0xaaaaff7f8f40->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e360 }
-0xaaaaff7f8f40->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e360 }
-0xffffd0d1e360->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e360 }
+   $1 = token 'a' (0xaaaadb0f5f40 'a')
+-> $$ = nterm item (0xffffde0f4b90 'a')
+0xaaaadb0f5f40->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4b90 }
+0xaaaadb0f5f40->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b90 }
+0xffffde0f4b90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4b90 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xffffd0d1e270->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40 }
-0xffffd0d1e340->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'p' (0xffffd0d1e340 'p'Exception caught: cleaning lookahead and stack
-0xaaaaff7f8f40->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e340 }
-0xaaaaff7f8f20->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e340 }
-0xaaaaff7f8f00->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e340 }
-0xaaaaff7f8ee0->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xffffd0d1e340 }
+0xffffde0f4aa0->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40 }
+0xffffde0f4b70->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'p' (0xffffde0f4b70 'p'Exception caught: cleaning lookahead and stack
+0xaaaadb0f5f40->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4b70 }
+0xaaaadb0f5f20->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b70 }
+0xaaaadb0f5f00->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b70 }
+0xaaaadb0f5ee0->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xffffde0f4b70 }
 exception caught: printer
 end { }
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -256656,99 +257261,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd0d1e270->Object::Object { }
-0xffffd0d1e340->Object::Object { 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'a' (0xffffd0d1e340 'a')
-0xffffd0d1e260->Object::Object { 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xffffd0d1e260, 0xffffd0d1e340 }
-Shifting token 'a' (0xffffd0d1e260 'a')
-0xaaaaff7f8ee0->Object::Object { 0xffffd0d1e260 }
-0xffffd0d1e260->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e260 }
+0xffffde0f4aa0->Object::Object { }
+0xffffde0f4b70->Object::Object { 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'a' (0xffffde0f4b70 'a')
+0xffffde0f4a90->Object::Object { 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xffffde0f4a90, 0xffffde0f4b70 }
+Shifting token 'a' (0xffffde0f4a90 'a')
+0xaaaadb0f5ee0->Object::Object { 0xffffde0f4a90 }
+0xffffde0f4a90->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4a90 }
 Entering state 2
 Stack now 0 2
-0xffffd0d1e360->Object::Object { 0xaaaaff7f8ee0 }
+0xffffde0f4b90->Object::Object { 0xaaaadb0f5ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaff7f8ee0 'a')
--> $$ = nterm item (0xffffd0d1e360 'a')
-0xaaaaff7f8ee0->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e360 }
-0xaaaaff7f8ee0->Object::Object { 0xffffd0d1e360 }
-0xffffd0d1e360->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e360 }
+   $1 = token 'a' (0xaaaadb0f5ee0 'a')
+-> $$ = nterm item (0xffffde0f4b90 'a')
+0xaaaadb0f5ee0->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4b90 }
+0xaaaadb0f5ee0->Object::Object { 0xffffde0f4b90 }
+0xffffde0f4b90->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4b90 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xffffd0d1e270->Object::Object { 0xaaaaff7f8ee0 }
-0xffffd0d1e340->Object::Object { 0xaaaaff7f8ee0, 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'a' (0xffffd0d1e340 'a')
-0xffffd0d1e260->Object::Object { 0xaaaaff7f8ee0, 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e260, 0xffffd0d1e340 }
-Shifting token 'a' (0xffffd0d1e260 'a')
-0xaaaaff7f8f00->Object::Object { 0xaaaaff7f8ee0, 0xffffd0d1e260 }
-0xffffd0d1e260->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e260 }
+0xffffde0f4aa0->Object::Object { 0xaaaadb0f5ee0 }
+0xffffde0f4b70->Object::Object { 0xaaaadb0f5ee0, 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'a' (0xffffde0f4b70 'a')
+0xffffde0f4a90->Object::Object { 0xaaaadb0f5ee0, 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4a90, 0xffffde0f4b70 }
+Shifting token 'a' (0xffffde0f4a90 'a')
+0xaaaadb0f5f00->Object::Object { 0xaaaadb0f5ee0, 0xffffde0f4a90 }
+0xffffde0f4a90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4a90 }
 Entering state 2
 Stack now 0 11 2
-0xffffd0d1e360->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00 }
+0xffffde0f4b90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaff7f8f00 'a')
--> $$ = nterm item (0xffffd0d1e360 'a')
-0xaaaaff7f8f00->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e360 }
-0xaaaaff7f8f00->Object::Object { 0xaaaaff7f8ee0, 0xffffd0d1e360 }
-0xffffd0d1e360->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e360 }
+   $1 = token 'a' (0xaaaadb0f5f00 'a')
+-> $$ = nterm item (0xffffde0f4b90 'a')
+0xaaaadb0f5f00->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b90 }
+0xaaaadb0f5f00->Object::Object { 0xaaaadb0f5ee0, 0xffffde0f4b90 }
+0xffffde0f4b90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b90 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xffffd0d1e270->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00 }
-0xffffd0d1e340->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'a' (0xffffd0d1e340 'a')
-0xffffd0d1e260->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e260, 0xffffd0d1e340 }
-Shifting token 'a' (0xffffd0d1e260 'a')
-0xaaaaff7f8f20->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e260 }
-0xffffd0d1e260->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e260 }
+0xffffde0f4aa0->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00 }
+0xffffde0f4b70->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'a' (0xffffde0f4b70 'a')
+0xffffde0f4a90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4a90, 0xffffde0f4b70 }
+Shifting token 'a' (0xffffde0f4a90 'a')
+0xaaaadb0f5f20->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4a90 }
+0xffffde0f4a90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4a90 }
 Entering state 2
 Stack now 0 11 11 2
-0xffffd0d1e360->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20 }
+0xffffde0f4b90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaff7f8f20 'a')
--> $$ = nterm item (0xffffd0d1e360 'a')
-0xaaaaff7f8f20->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e360 }
-0xaaaaff7f8f20->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e360 }
-0xffffd0d1e360->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e360 }
+   $1 = token 'a' (0xaaaadb0f5f20 'a')
+-> $$ = nterm item (0xffffde0f4b90 'a')
+0xaaaadb0f5f20->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b90 }
+0xaaaadb0f5f20->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b90 }
+0xffffde0f4b90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b90 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xffffd0d1e270->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20 }
-0xffffd0d1e340->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'a' (0xffffd0d1e340 'a')
-0xffffd0d1e260->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e260, 0xffffd0d1e340 }
-Shifting token 'a' (0xffffd0d1e260 'a')
-0xaaaaff7f8f40->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e260 }
-0xffffd0d1e260->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e260 }
+0xffffde0f4aa0->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20 }
+0xffffde0f4b70->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'a' (0xffffde0f4b70 'a')
+0xffffde0f4a90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4a90, 0xffffde0f4b70 }
+Shifting token 'a' (0xffffde0f4a90 'a')
+0xaaaadb0f5f40->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4a90 }
+0xffffde0f4a90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4a90 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xffffd0d1e360->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40 }
+0xffffde0f4b90->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaff7f8f40 'a')
--> $$ = nterm item (0xffffd0d1e360 'a')
-0xaaaaff7f8f40->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e360 }
-0xaaaaff7f8f40->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e360 }
-0xffffd0d1e360->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e360 }
+   $1 = token 'a' (0xaaaadb0f5f40 'a')
+-> $$ = nterm item (0xffffde0f4b90 'a')
+0xaaaadb0f5f40->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4b90 }
+0xaaaadb0f5f40->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b90 }
+0xffffde0f4b90->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4b90 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xffffd0d1e270->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40 }
-0xffffd0d1e340->Object::Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e270 }
-0xffffd0d1e270->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e270, 0xffffd0d1e340 }
-Next token is token 'p' (0xffffd0d1e340 'p'Exception caught: cleaning lookahead and stack
-0xaaaaff7f8f40->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xaaaaff7f8f40, 0xffffd0d1e340 }
-0xaaaaff7f8f20->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xaaaaff7f8f20, 0xffffd0d1e340 }
-0xaaaaff7f8f00->Object::~Object { 0xaaaaff7f8ee0, 0xaaaaff7f8f00, 0xffffd0d1e340 }
-0xaaaaff7f8ee0->Object::~Object { 0xaaaaff7f8ee0, 0xffffd0d1e340 }
-0xffffd0d1e340->Object::~Object { 0xffffd0d1e340 }
+0xffffde0f4aa0->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40 }
+0xffffde0f4b70->Object::Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4aa0 }
+0xffffde0f4aa0->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4aa0, 0xffffde0f4b70 }
+Next token is token 'p' (0xffffde0f4b70 'p'Exception caught: cleaning lookahead and stack
+0xaaaadb0f5f40->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xaaaadb0f5f40, 0xffffde0f4b70 }
+0xaaaadb0f5f20->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xaaaadb0f5f20, 0xffffde0f4b70 }
+0xaaaadb0f5f00->Object::~Object { 0xaaaadb0f5ee0, 0xaaaadb0f5f00, 0xffffde0f4b70 }
+0xaaaadb0f5ee0->Object::~Object { 0xaaaadb0f5ee0, 0xffffde0f4b70 }
+0xffffde0f4b70->Object::~Object { 0xffffde0f4b70 }
 exception caught: printer
 end { }
 ./c++.at:1362: grep '^exception caught: printer$' stderr
@@ -256762,423 +257367,168 @@
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-693. java.at:186: testing Java parser class and package names ...
-./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
 ./c++.at:1362:  $PREPARSER ./input aaaaT
 stderr:
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1362:  $PREPARSER ./input aaaaR
 stderr:
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./java.at:188: grep '[mb]4_' YYParser.y
 ======== Testing with C++ standard flags: ''
 ./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stdout:
-693. java.at:186:  skipped (java.at:188)
-
-694. java.at:217: testing Java parser class modifiers ...
-./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
-./java.at:219: grep '[mb]4_' YYParser.y
-stdout:
-694. java.at:217:  skipped (java.at:219)
-
-695. java.at:287: testing Java parser class extends and implements ...
-./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
-stderr:
-stdout:
-./c++.at:1555:  $PREPARSER ./test
-stderr:
-./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./java.at:289: grep '[mb]4_' YYParser.y
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stdout:
-695. java.at:287:  skipped (java.at:289)
-
 stderr:
 stdout:
-./c++.at:1363:  $PREPARSER ./input aaaas
+./c++.at:1361:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaal
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaal
 stderr:
 exception caught: yylex
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input i
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input i
 stderr:
 exception caught: initial-action
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaap
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaap
 stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input --debug aaaap
-696. java.at:307: testing Java %parse-param and %lex-param ...
-./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input --debug aaaap
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xffffe63d3d50->Object::Object { }
-0xffffe63d3e20->Object::Object { 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'a' (0xffffe63d3e20 'a')
-0xffffe63d3d60->Object::Object { 0xffffe63d3e20 }
-0xffffe63d3d10->Object::Object { 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::~Object { 0xffffe63d3d10, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xffffe63d3d60, 0xffffe63d3e20 }
-Shifting token 'a' (0xffffe63d3d60 'a')
-0xaaaaeea11ee0->Object::Object { 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3ce8, 0xffffe63d3d60 }
-0xffffe63d3d60->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3d60 }
+0xaaab0baa5b40->Object::Object { }
+Next token is token 'a' (0xaaab0baa5b40 'a')
+Shifting token 'a' (0xaaab0baa5b40 'a')
 Entering state 1
 Stack now 0 1
-0xffffe63d3e40->Object::Object { 0xaaaaeea11ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaeea11ee0 'a')
--> $$ = nterm item (0xffffe63d3e40 'a')
-0xaaaaeea11ee0->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3e40 }
-0xaaaaeea11ee0->Object::Object { 0xffffe63d3e40 }
-0xffffe63d3df0->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3df0, 0xffffe63d3e40 }
-0xffffe63d3e40->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3e40 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaab0baa5b40 'a')
+-> $$ = nterm item (0xaaab0baa5b40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xffffe63d3d50->Object::Object { 0xaaaaeea11ee0 }
-0xffffe63d3e20->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'a' (0xffffe63d3e20 'a')
-0xffffe63d3d60->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3d10, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3d60, 0xffffe63d3e20 }
-Shifting token 'a' (0xffffe63d3d60 'a')
-0xaaaaeea11f00->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3ce8, 0xffffe63d3d60 }
-0xffffe63d3d60->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60 }
+0xaaab0baa5b90->Object::Object { 0xaaab0baa5b40 }
+Next token is token 'a' (0xaaab0baa5b90 'a')
+Shifting token 'a' (0xaaab0baa5b90 'a')
 Entering state 1
 Stack now 0 10 1
-0xffffe63d3e40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaeea11f00 'a')
--> $$ = nterm item (0xffffe63d3e40 'a')
-0xaaaaeea11f00->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e40 }
-0xaaaaeea11f00->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3df0, 0xffffe63d3e40 }
-0xffffe63d3e40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e40 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaab0baa5b90 'a')
+-> $$ = nterm item (0xaaab0baa5b90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xffffe63d3d50->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00 }
-0xffffe63d3e20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'a' (0xffffe63d3e20 'a')
-0xffffe63d3d60->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d10, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60, 0xffffe63d3e20 }
-Shifting token 'a' (0xffffe63d3d60 'a')
-0xaaaaeea11f20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3ce8, 0xffffe63d3d60 }
-0xffffe63d3d60->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60 }
+0xaaab0baa5be0->Object::Object { 0xaaab0baa5b40, 0xaaab0baa5b90 }
+Next token is token 'a' (0xaaab0baa5be0 'a')
+Shifting token 'a' (0xaaab0baa5be0 'a')
 Entering state 1
 Stack now 0 10 10 1
-0xffffe63d3e40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaeea11f20 'a')
--> $$ = nterm item (0xffffe63d3e40 'a')
-0xaaaaeea11f20->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e40 }
-0xaaaaeea11f20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3df0, 0xffffe63d3e40 }
-0xffffe63d3e40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e40 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaab0baa5be0 'a')
+-> $$ = nterm item (0xaaab0baa5be0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xffffe63d3d50->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20 }
-0xffffe63d3e20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'a' (0xffffe63d3e20 'a')
-0xffffe63d3d60->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d10, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60, 0xffffe63d3e20 }
-Shifting token 'a' (0xffffe63d3d60 'a')
-0xaaaaeea11f40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3ce8, 0xffffe63d3d60 }
-0xffffe63d3d60->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3d60 }
+0xaaab0baa5c30->Object::Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0 }
+Next token is token 'a' (0xaaab0baa5c30 'a')
+Shifting token 'a' (0xaaab0baa5c30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
-0xffffe63d3e40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaeea11f40 'a')
--> $$ = nterm item (0xffffe63d3e40 'a')
-0xaaaaeea11f40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3e40 }
-0xaaaaeea11f40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3df0, 0xffffe63d3e40 }
-0xffffe63d3e40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3e40 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaab0baa5c30 'a')
+-> $$ = nterm item (0xaaab0baa5c30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xffffe63d3d50->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40 }
-0xffffe63d3e20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'p' (0xffffe63d3e20 'p'Exception caught: cleaning lookahead and stack
-0xaaaaeea11f40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3e20 }
-0xaaaaeea11f20->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e20 }
-0xaaaaeea11f00->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e20 }
-0xaaaaeea11ee0->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xffffe63d3e20 }
+0xaaab0baa5c80->Object::Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0, 0xaaab0baa5c30 }
+Next token is token 'p' (0xaaab0baa5c80 'p'Exception caught: cleaning lookahead and stack
+0xaaab0baa5c80->Object::~Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0, 0xaaab0baa5c30, 0xaaab0baa5c80 }
+0xaaab0baa5c30->Object::~Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0, 0xaaab0baa5c30 }
+0xaaab0baa5be0->Object::~Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0 }
+0xaaab0baa5b90->Object::~Object { 0xaaab0baa5b40, 0xaaab0baa5b90 }
+0xaaab0baa5b40->Object::~Object { 0xaaab0baa5b40 }
 exception caught: printer
 end { }
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xffffe63d3d50->Object::Object { }
-0xffffe63d3e20->Object::Object { 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'a' (0xffffe63d3e20 'a')
-0xffffe63d3d60->Object::Object { 0xffffe63d3e20 }
-0xffffe63d3d10->Object::Object { 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::~Object { 0xffffe63d3d10, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xffffe63d3d60, 0xffffe63d3e20 }
-Shifting token 'a' (0xffffe63d3d60 'a')
-0xaaaaeea11ee0->Object::Object { 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3ce8, 0xffffe63d3d60 }
-0xffffe63d3d60->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3d60 }
+0xaaab0baa5b40->Object::Object { }
+Next token is token 'a' (0xaaab0baa5b40 'a')
+Shifting token 'a' (0xaaab0baa5b40 'a')
 Entering state 1
 Stack now 0 1
-0xffffe63d3e40->Object::Object { 0xaaaaeea11ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaeea11ee0 'a')
--> $$ = nterm item (0xffffe63d3e40 'a')
-0xaaaaeea11ee0->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3e40 }
-0xaaaaeea11ee0->Object::Object { 0xffffe63d3e40 }
-0xffffe63d3df0->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3df0, 0xffffe63d3e40 }
-0xffffe63d3e40->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3e40 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaab0baa5b40 'a')
+-> $$ = nterm item (0xaaab0baa5b40 'a')
 Entering state 10
 Stack now 0 10
 Reading a token
-0xffffe63d3d50->Object::Object { 0xaaaaeea11ee0 }
-0xffffe63d3e20->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'a' (0xffffe63d3e20 'a')
-0xffffe63d3d60->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3d10, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3d60, 0xffffe63d3e20 }
-Shifting token 'a' (0xffffe63d3d60 'a')
-0xaaaaeea11f00->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3ce8, 0xffffe63d3d60 }
-0xffffe63d3d60->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60 }
+0xaaab0baa5b90->Object::Object { 0xaaab0baa5b40 }
+Next token is token 'a' (0xaaab0baa5b90 'a')
+Shifting token 'a' (0xaaab0baa5b90 'a')
 Entering state 1
 Stack now 0 10 1
-0xffffe63d3e40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaeea11f00 'a')
--> $$ = nterm item (0xffffe63d3e40 'a')
-0xaaaaeea11f00->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e40 }
-0xaaaaeea11f00->Object::Object { 0xaaaaeea11ee0, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3df0, 0xffffe63d3e40 }
-0xffffe63d3e40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e40 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaab0baa5b90 'a')
+-> $$ = nterm item (0xaaab0baa5b90 'a')
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xffffe63d3d50->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00 }
-0xffffe63d3e20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'a' (0xffffe63d3e20 'a')
-0xffffe63d3d60->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d10, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60, 0xffffe63d3e20 }
-Shifting token 'a' (0xffffe63d3d60 'a')
-0xaaaaeea11f20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3ce8, 0xffffe63d3d60 }
-0xffffe63d3d60->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60 }
+0xaaab0baa5be0->Object::Object { 0xaaab0baa5b40, 0xaaab0baa5b90 }
+Next token is token 'a' (0xaaab0baa5be0 'a')
+Shifting token 'a' (0xaaab0baa5be0 'a')
 Entering state 1
 Stack now 0 10 10 1
-0xffffe63d3e40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaeea11f20 'a')
--> $$ = nterm item (0xffffe63d3e40 'a')
-0xaaaaeea11f20->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e40 }
-0xaaaaeea11f20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3df0, 0xffffe63d3e40 }
-0xffffe63d3e40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e40 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaab0baa5be0 'a')
+-> $$ = nterm item (0xaaab0baa5be0 'a')
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xffffe63d3d50->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20 }
-0xffffe63d3e20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'a' (0xffffe63d3e20 'a')
-0xffffe63d3d60->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3d10->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d10, 0xffffe63d3d60, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60, 0xffffe63d3e20 }
-Shifting token 'a' (0xffffe63d3d60 'a')
-0xaaaaeea11f40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3d60 }
-0xffffe63d3ce8->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3ce8, 0xffffe63d3d60 }
-0xffffe63d3d60->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3d60 }
+0xaaab0baa5c30->Object::Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0 }
+Next token is token 'a' (0xaaab0baa5c30 'a')
+Shifting token 'a' (0xaaab0baa5c30 'a')
 Entering state 1
 Stack now 0 10 10 10 1
-0xffffe63d3e40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaaeea11f40 'a')
--> $$ = nterm item (0xffffe63d3e40 'a')
-0xaaaaeea11f40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3e40 }
-0xaaaaeea11f40->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3e40 }
-0xffffe63d3df0->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3df0, 0xffffe63d3e40 }
-0xffffe63d3e40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3e40 }
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaab0baa5c30 'a')
+-> $$ = nterm item (0xaaab0baa5c30 'a')
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xffffe63d3d50->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40 }
-0xffffe63d3e20->Object::Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3d50 }
-0xffffe63d3d50->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3d50, 0xffffe63d3e20 }
-Next token is token 'p' (0xffffe63d3e20 'p'Exception caught: cleaning lookahead and stack
-0xaaaaeea11f40->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xaaaaeea11f40, 0xffffe63d3e20 }
-0xaaaaeea11f20->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xaaaaeea11f20, 0xffffe63d3e20 }
-0xaaaaeea11f00->Object::~Object { 0xaaaaeea11ee0, 0xaaaaeea11f00, 0xffffe63d3e20 }
-0xaaaaeea11ee0->Object::~Object { 0xaaaaeea11ee0, 0xffffe63d3e20 }
-0xffffe63d3e20->Object::~Object { 0xffffe63d3e20 }
+0xaaab0baa5c80->Object::Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0, 0xaaab0baa5c30 }
+Next token is token 'p' (0xaaab0baa5c80 'p'Exception caught: cleaning lookahead and stack
+0xaaab0baa5c80->Object::~Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0, 0xaaab0baa5c30, 0xaaab0baa5c80 }
+0xaaab0baa5c30->Object::~Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0, 0xaaab0baa5c30 }
+0xaaab0baa5be0->Object::~Object { 0xaaab0baa5b40, 0xaaab0baa5b90, 0xaaab0baa5be0 }
+0xaaab0baa5b90->Object::~Object { 0xaaab0baa5b40, 0xaaab0baa5b90 }
+0xaaab0baa5b40->Object::~Object { 0xaaab0baa5b40 }
 exception caught: printer
 end { }
-./c++.at:1363: grep '^exception caught: printer$' stderr
-stderr:
+./c++.at:1361: grep '^exception caught: printer$' stderr
 stdout:
 exception caught: printer
-stdout:
-./c++.at:1363:  $PREPARSER ./input aaaae
-./java.at:309: grep '[mb]4_' YYParser.y
+./c++.at:1361:  $PREPARSER ./input aaaae
 stderr:
-stdout:
-./c++.at:1555: ./check
 exception caught: syntax error
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
--std=c++98 not supported
-======== Testing with C++ standard flags: ''
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-696. java.at:307: ./c++.at:1363:  $PREPARSER ./input aaaaE
- skipped (java.at:309)
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaaE
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaT
-
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaR
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1555: ./check
--std=c++03 not supported
-======== Testing with C++ standard flags: ''
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-697. java.at:381: testing Java throws specifications ...
-./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
-./java.at:441: grep '[mb]4_' YYParser.y
-stdout:
-697. java.at:381:  skipped (java.at:441)
-
-stderr:
-stdout:
-./c++.at:1555: ./check
--std=c++11 not supported
-======== Testing with C++ standard flags: ''
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-698. java.at:470: testing Java constructor init and init_throws ...
-./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
-./java.at:475: grep '[mb]4_' YYParser.y
-stdout:
-698. java.at:470:  skipped (java.at:475)
-
-stderr:
-stdout:
-./c++.at:1555: ./check
-./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o test.cc test.y
-699. java.at:497: testing Java value, position, and location types ...
-./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./java.at:499: grep '[mb]4_' YYParser.y
-stdout:
-699. java.at:497:  skipped (java.at:499)
-
-700. java.at:528: testing Java syntax error handling without error token ...
-./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret YYParser.y
-700. java.at:528:  skipped (java.at:580)
-
-701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ...
-./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y
-./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java
-./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java
-./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y
-./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java
-./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java
-./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y
-./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java
-./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java
-701. javapush.at:172:  skipped (javapush.at:207)
-
-702. javapush.at:217: testing Trivial Push Parser with %initial-action ...
-./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y
-./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java
-702. javapush.at:217:  skipped (javapush.at:230)
-
-703. d.at:103: testing D parser class extends and implements ...
-./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
-stderr:
-stdout:
-./c++.at:858:  $PREPARSER ./input
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaaT
 stderr:
-./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./d.at:106: grep '[mb]4_' YYParser.y
-stdout:
-703. d.at:103:  skipped (d.at:106)
-
-704. d.at:138: testing D parser class api.token.raw true by default ...
-./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -Wno-deprecated YYParser.y
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaaR
 stderr:
-stdout:
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-./d.at:141: grep '[mb]4_' YYParser.y
-stdout:
-704. d.at:138:  skipped (d.at:141)
-
-705. cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ...
-./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
-stderr:
-types.y:77.8-37: warning: unset value: $$ [-Wother]
-types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
-types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
+./c++.at:1361: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./cxx-type.at:412:  $PREPARSER ./types test-input
@@ -258332,8 +258682,11 @@
 ./cxx-type.at:416: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
 stderr:
 stdout:
+./c++.at:858:  $PREPARSER ./input
+stderr:
+./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1555:  $PREPARSER ./test
@@ -258348,719 +258701,8 @@
 ./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
-./c++.at:1363:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaal
-stderr:
-exception caught: yylex
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input i
-stderr:
-exception caught: initial-action
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaap
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input --debug aaaap
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xffffc075a110->Object::Object { }
-0xffffc075a1e0->Object::Object { 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'a' (0xffffc075a1e0 'a')
-0xffffc075a120->Object::Object { 0xffffc075a1e0 }
-0xffffc075a0d0->Object::Object { 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::~Object { 0xffffc075a0d0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xffffc075a120, 0xffffc075a1e0 }
-Shifting token 'a' (0xffffc075a120 'a')
-0xaaaae6d3cee0->Object::Object { 0xffffc075a120 }
-0xffffc075a0a8->Object::Object { 0xaaaae6d3cee0, 0xffffc075a120 }
-0xffffc075a0a8->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a0a8, 0xffffc075a120 }
-0xffffc075a120->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a120 }
-Entering state 1
-Stack now 0 1
-0xffffc075a200->Object::Object { 0xaaaae6d3cee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6d3cee0 'a')
--> $$ = nterm item (0xffffc075a200 'a')
-0xaaaae6d3cee0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a200 }
-0xaaaae6d3cee0->Object::Object { 0xffffc075a200 }
-0xffffc075a1b0->Object::Object { 0xaaaae6d3cee0, 0xffffc075a200 }
-0xffffc075a1b0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a1b0, 0xffffc075a200 }
-0xffffc075a200->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a200 }
-Entering state 10
-Stack now 0 10
-Reading a token
-0xffffc075a110->Object::Object { 0xaaaae6d3cee0 }
-0xffffc075a1e0->Object::Object { 0xaaaae6d3cee0, 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'a' (0xffffc075a1e0 'a')
-0xffffc075a120->Object::Object { 0xaaaae6d3cee0, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::Object { 0xaaaae6d3cee0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a0d0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a120, 0xffffc075a1e0 }
-Shifting token 'a' (0xffffc075a120 'a')
-0xaaaae6d3cf00->Object::Object { 0xaaaae6d3cee0, 0xffffc075a120 }
-0xffffc075a0a8->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120 }
-0xffffc075a0a8->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a0a8, 0xffffc075a120 }
-0xffffc075a120->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120 }
-Entering state 1
-Stack now 0 10 1
-0xffffc075a200->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6d3cf00 'a')
--> $$ = nterm item (0xffffc075a200 'a')
-0xaaaae6d3cf00->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a200 }
-0xaaaae6d3cf00->Object::Object { 0xaaaae6d3cee0, 0xffffc075a200 }
-0xffffc075a1b0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a200 }
-0xffffc075a1b0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a1b0, 0xffffc075a200 }
-0xffffc075a200->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a200 }
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xffffc075a110->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00 }
-0xffffc075a1e0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'a' (0xffffc075a1e0 'a')
-0xffffc075a120->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a0d0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120, 0xffffc075a1e0 }
-Shifting token 'a' (0xffffc075a120 'a')
-0xaaaae6d3cf20->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120 }
-0xffffc075a0a8->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120 }
-0xffffc075a0a8->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a0a8, 0xffffc075a120 }
-0xffffc075a120->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120 }
-Entering state 1
-Stack now 0 10 10 1
-0xffffc075a200->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6d3cf20 'a')
--> $$ = nterm item (0xffffc075a200 'a')
-0xaaaae6d3cf20->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a200 }
-0xaaaae6d3cf20->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a200 }
-0xffffc075a1b0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a200 }
-0xffffc075a1b0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a1b0, 0xffffc075a200 }
-0xffffc075a200->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a200 }
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xffffc075a110->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20 }
-0xffffc075a1e0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'a' (0xffffc075a1e0 'a')
-0xffffc075a120->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a0d0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120, 0xffffc075a1e0 }
-Shifting token 'a' (0xffffc075a120 'a')
-0xaaaae6d3cf40->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120 }
-0xffffc075a0a8->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a120 }
-0xffffc075a0a8->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a0a8, 0xffffc075a120 }
-0xffffc075a120->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a120 }
-Entering state 1
-Stack now 0 10 10 10 1
-0xffffc075a200->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6d3cf40 'a')
--> $$ = nterm item (0xffffc075a200 'a')
-0xaaaae6d3cf40->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a200 }
-0xaaaae6d3cf40->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a200 }
-0xffffc075a1b0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a200 }
-0xffffc075a1b0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a1b0, 0xffffc075a200 }
-0xffffc075a200->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a200 }
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xffffc075a110->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40 }
-0xffffc075a1e0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'p' (0xffffc075a1e0 'p'Exception caught: cleaning lookahead and stack
-0xaaaae6d3cf40->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a1e0 }
-0xaaaae6d3cf20->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a1e0 }
-0xaaaae6d3cf00->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a1e0 }
-0xaaaae6d3cee0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xffffc075a1e0 }
-exception caught: printer
-end { }
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xffffc075a110->Object::Object { }
-0xffffc075a1e0->Object::Object { 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'a' (0xffffc075a1e0 'a')
-0xffffc075a120->Object::Object { 0xffffc075a1e0 }
-0xffffc075a0d0->Object::Object { 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::~Object { 0xffffc075a0d0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xffffc075a120, 0xffffc075a1e0 }
-Shifting token 'a' (0xffffc075a120 'a')
-0xaaaae6d3cee0->Object::Object { 0xffffc075a120 }
-0xffffc075a0a8->Object::Object { 0xaaaae6d3cee0, 0xffffc075a120 }
-0xffffc075a0a8->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a0a8, 0xffffc075a120 }
-0xffffc075a120->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a120 }
-Entering state 1
-Stack now 0 1
-0xffffc075a200->Object::Object { 0xaaaae6d3cee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6d3cee0 'a')
--> $$ = nterm item (0xffffc075a200 'a')
-0xaaaae6d3cee0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a200 }
-0xaaaae6d3cee0->Object::Object { 0xffffc075a200 }
-0xffffc075a1b0->Object::Object { 0xaaaae6d3cee0, 0xffffc075a200 }
-0xffffc075a1b0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a1b0, 0xffffc075a200 }
-0xffffc075a200->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a200 }
-Entering state 10
-Stack now 0 10
-Reading a token
-0xffffc075a110->Object::Object { 0xaaaae6d3cee0 }
-0xffffc075a1e0->Object::Object { 0xaaaae6d3cee0, 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'a' (0xffffc075a1e0 'a')
-0xffffc075a120->Object::Object { 0xaaaae6d3cee0, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::Object { 0xaaaae6d3cee0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a0d0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a120, 0xffffc075a1e0 }
-Shifting token 'a' (0xffffc075a120 'a')
-0xaaaae6d3cf00->Object::Object { 0xaaaae6d3cee0, 0xffffc075a120 }
-0xffffc075a0a8->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120 }
-0xffffc075a0a8->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a0a8, 0xffffc075a120 }
-0xffffc075a120->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120 }
-Entering state 1
-Stack now 0 10 1
-0xffffc075a200->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6d3cf00 'a')
--> $$ = nterm item (0xffffc075a200 'a')
-0xaaaae6d3cf00->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a200 }
-0xaaaae6d3cf00->Object::Object { 0xaaaae6d3cee0, 0xffffc075a200 }
-0xffffc075a1b0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a200 }
-0xffffc075a1b0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a1b0, 0xffffc075a200 }
-0xffffc075a200->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a200 }
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xffffc075a110->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00 }
-0xffffc075a1e0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'a' (0xffffc075a1e0 'a')
-0xffffc075a120->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a0d0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120, 0xffffc075a1e0 }
-Shifting token 'a' (0xffffc075a120 'a')
-0xaaaae6d3cf20->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a120 }
-0xffffc075a0a8->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120 }
-0xffffc075a0a8->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a0a8, 0xffffc075a120 }
-0xffffc075a120->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120 }
-Entering state 1
-Stack now 0 10 10 1
-0xffffc075a200->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6d3cf20 'a')
--> $$ = nterm item (0xffffc075a200 'a')
-0xaaaae6d3cf20->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a200 }
-0xaaaae6d3cf20->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a200 }
-0xffffc075a1b0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a200 }
-0xffffc075a1b0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a1b0, 0xffffc075a200 }
-0xffffc075a200->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a200 }
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xffffc075a110->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20 }
-0xffffc075a1e0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'a' (0xffffc075a1e0 'a')
-0xffffc075a120->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a0d0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a0d0, 0xffffc075a120, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120, 0xffffc075a1e0 }
-Shifting token 'a' (0xffffc075a120 'a')
-0xaaaae6d3cf40->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a120 }
-0xffffc075a0a8->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a120 }
-0xffffc075a0a8->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a0a8, 0xffffc075a120 }
-0xffffc075a120->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a120 }
-Entering state 1
-Stack now 0 10 10 10 1
-0xffffc075a200->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae6d3cf40 'a')
--> $$ = nterm item (0xffffc075a200 'a')
-0xaaaae6d3cf40->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a200 }
-0xaaaae6d3cf40->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a200 }
-0xffffc075a1b0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a200 }
-0xffffc075a1b0->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a1b0, 0xffffc075a200 }
-0xffffc075a200->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a200 }
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xffffc075a110->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40 }
-0xffffc075a1e0->Object::Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a110 }
-0xffffc075a110->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a110, 0xffffc075a1e0 }
-Next token is token 'p' (0xffffc075a1e0 'p'Exception caught: cleaning lookahead and stack
-0xaaaae6d3cf40->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xaaaae6d3cf40, 0xffffc075a1e0 }
-0xaaaae6d3cf20->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xaaaae6d3cf20, 0xffffc075a1e0 }
-0xaaaae6d3cf00->Object::~Object { 0xaaaae6d3cee0, 0xaaaae6d3cf00, 0xffffc075a1e0 }
-0xaaaae6d3cee0->Object::~Object { 0xaaaae6d3cee0, 0xffffc075a1e0 }
-0xffffc075a1e0->Object::~Object { 0xffffc075a1e0 }
-exception caught: printer
-end { }
-./c++.at:1363: grep '^exception caught: printer$' stderr
-stdout:
-exception caught: printer
-./c++.at:1363:  $PREPARSER ./input aaaae
-stderr:
-exception caught: syntax error
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaE
-stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaT
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaR
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid character
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./c++.at:1066: ./check
-./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS
-stderr:
-stdout:
-./c++.at:858:  $PREPARSER ./input
-stderr:
-./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:858: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./c++.at:1361:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaal
-stderr:
-exception caught: yylex
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input i
-stderr:
-exception caught: initial-action
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaap
-stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input --debug aaaap
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xaaab1811db40->Object::Object { }
-Next token is token 'a' (0xaaab1811db40 'a')
-Shifting token 'a' (0xaaab1811db40 'a')
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1811db40 'a')
--> $$ = nterm item (0xaaab1811db40 'a')
-Entering state 10
-Stack now 0 10
-Reading a token
-0xaaab1811db90->Object::Object { 0xaaab1811db40 }
-Next token is token 'a' (0xaaab1811db90 'a')
-Shifting token 'a' (0xaaab1811db90 'a')
-Entering state 1
-Stack now 0 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1811db90 'a')
--> $$ = nterm item (0xaaab1811db90 'a')
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xaaab1811dbe0->Object::Object { 0xaaab1811db40, 0xaaab1811db90 }
-Next token is token 'a' (0xaaab1811dbe0 'a')
-Shifting token 'a' (0xaaab1811dbe0 'a')
-Entering state 1
-Stack now 0 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1811dbe0 'a')
--> $$ = nterm item (0xaaab1811dbe0 'a')
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xaaab1811dc30->Object::Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0 }
-Next token is token 'a' (0xaaab1811dc30 'a')
-Shifting token 'a' (0xaaab1811dc30 'a')
-Entering state 1
-Stack now 0 10 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1811dc30 'a')
--> $$ = nterm item (0xaaab1811dc30 'a')
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xaaab1811dc80->Object::Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0, 0xaaab1811dc30 }
-Next token is token 'p' (0xaaab1811dc80 'p'Exception caught: cleaning lookahead and stack
-0xaaab1811dc80->Object::~Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0, 0xaaab1811dc30, 0xaaab1811dc80 }
-0xaaab1811dc30->Object::~Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0, 0xaaab1811dc30 }
-0xaaab1811dbe0->Object::~Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0 }
-0xaaab1811db90->Object::~Object { 0xaaab1811db40, 0xaaab1811db90 }
-0xaaab1811db40->Object::~Object { 0xaaab1811db40 }
-exception caught: printer
-end { }
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xaaab1811db40->Object::Object { }
-Next token is token 'a' (0xaaab1811db40 'a')
-Shifting token 'a' (0xaaab1811db40 'a')
-Entering state 1
-Stack now 0 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1811db40 'a')
--> $$ = nterm item (0xaaab1811db40 'a')
-Entering state 10
-Stack now 0 10
-Reading a token
-0xaaab1811db90->Object::Object { 0xaaab1811db40 }
-Next token is token 'a' (0xaaab1811db90 'a')
-Shifting token 'a' (0xaaab1811db90 'a')
-Entering state 1
-Stack now 0 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1811db90 'a')
--> $$ = nterm item (0xaaab1811db90 'a')
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xaaab1811dbe0->Object::Object { 0xaaab1811db40, 0xaaab1811db90 }
-Next token is token 'a' (0xaaab1811dbe0 'a')
-Shifting token 'a' (0xaaab1811dbe0 'a')
-Entering state 1
-Stack now 0 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1811dbe0 'a')
--> $$ = nterm item (0xaaab1811dbe0 'a')
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xaaab1811dc30->Object::Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0 }
-Next token is token 'a' (0xaaab1811dc30 'a')
-Shifting token 'a' (0xaaab1811dc30 'a')
-Entering state 1
-Stack now 0 10 10 10 1
-Reducing stack by rule 4 (line 147):
-   $1 = token 'a' (0xaaab1811dc30 'a')
--> $$ = nterm item (0xaaab1811dc30 'a')
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xaaab1811dc80->Object::Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0, 0xaaab1811dc30 }
-Next token is token 'p' (0xaaab1811dc80 'p'Exception caught: cleaning lookahead and stack
-0xaaab1811dc80->Object::~Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0, 0xaaab1811dc30, 0xaaab1811dc80 }
-0xaaab1811dc30->Object::~Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0, 0xaaab1811dc30 }
-0xaaab1811dbe0->Object::~Object { 0xaaab1811db40, 0xaaab1811db90, 0xaaab1811dbe0 }
-0xaaab1811db90->Object::~Object { 0xaaab1811db40, 0xaaab1811db90 }
-0xaaab1811db40->Object::~Object { 0xaaab1811db40 }
-exception caught: printer
-end { }
-./c++.at:1361: grep '^exception caught: printer$' stderr
-stdout:
-exception caught: printer
-./c++.at:1361:  $PREPARSER ./input aaaae
-stderr:
-exception caught: syntax error
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaaE
-stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaaT
-stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1361:  $PREPARSER ./input aaaaR
-stderr:
-./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-686. c++.at:1361:  ok
-
-707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ...
-./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
-stderr:
-types.y:77.8-37: warning: unset value: $$ [-Wother]
-types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
-types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
-stderr:
-stdout:
-./c++.at:1362:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaal
-stderr:
-exception caught: yylex
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input i
-stderr:
-exception caught: initial-action
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaap
-stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input --debug aaaap
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xffffe729c440->Object::Object { }
-0xffffe729c510->Object::Object { 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'a' (0xffffe729c510 'a')
-0xffffe729c430->Object::Object { 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xffffe729c430, 0xffffe729c510 }
-Shifting token 'a' (0xffffe729c430 'a')
-0xaaab146e9ee0->Object::Object { 0xffffe729c430 }
-0xffffe729c430->Object::~Object { 0xaaab146e9ee0, 0xffffe729c430 }
-Entering state 2
-Stack now 0 2
-0xffffe729c530->Object::Object { 0xaaab146e9ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab146e9ee0 'a')
--> $$ = nterm item (0xffffe729c530 'a')
-0xaaab146e9ee0->Object::~Object { 0xaaab146e9ee0, 0xffffe729c530 }
-0xaaab146e9ee0->Object::Object { 0xffffe729c530 }
-0xffffe729c530->Object::~Object { 0xaaab146e9ee0, 0xffffe729c530 }
-Entering state 11
-Stack now 0 11
-Reading a token
-0xffffe729c440->Object::Object { 0xaaab146e9ee0 }
-0xffffe729c510->Object::Object { 0xaaab146e9ee0, 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xaaab146e9ee0, 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'a' (0xffffe729c510 'a')
-0xffffe729c430->Object::Object { 0xaaab146e9ee0, 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xaaab146e9ee0, 0xffffe729c430, 0xffffe729c510 }
-Shifting token 'a' (0xffffe729c430 'a')
-0xaaab146e9f00->Object::Object { 0xaaab146e9ee0, 0xffffe729c430 }
-0xffffe729c430->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c430 }
-Entering state 2
-Stack now 0 11 2
-0xffffe729c530->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab146e9f00 'a')
--> $$ = nterm item (0xffffe729c530 'a')
-0xaaab146e9f00->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c530 }
-0xaaab146e9f00->Object::Object { 0xaaab146e9ee0, 0xffffe729c530 }
-0xffffe729c530->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c530 }
-Entering state 11
-Stack now 0 11 11
-Reading a token
-0xffffe729c440->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00 }
-0xffffe729c510->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'a' (0xffffe729c510 'a')
-0xffffe729c430->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c430, 0xffffe729c510 }
-Shifting token 'a' (0xffffe729c430 'a')
-0xaaab146e9f20->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c430 }
-0xffffe729c430->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c430 }
-Entering state 2
-Stack now 0 11 11 2
-0xffffe729c530->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab146e9f20 'a')
--> $$ = nterm item (0xffffe729c530 'a')
-0xaaab146e9f20->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c530 }
-0xaaab146e9f20->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c530 }
-0xffffe729c530->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c530 }
-Entering state 11
-Stack now 0 11 11 11
-Reading a token
-0xffffe729c440->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20 }
-0xffffe729c510->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'a' (0xffffe729c510 'a')
-0xffffe729c430->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c430, 0xffffe729c510 }
-Shifting token 'a' (0xffffe729c430 'a')
-0xaaab146e9f40->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c430 }
-0xffffe729c430->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c430 }
-Entering state 2
-Stack now 0 11 11 11 2
-0xffffe729c530->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab146e9f40 'a')
--> $$ = nterm item (0xffffe729c530 'a')
-0xaaab146e9f40->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c530 }
-0xaaab146e9f40->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c530 }
-0xffffe729c530->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c530 }
-Entering state 11
-Stack now 0 11 11 11 11
-Reading a token
-0xffffe729c440->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40 }
-0xffffe729c510->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'p' (0xffffe729c510 'p'Exception caught: cleaning lookahead and stack
-0xaaab146e9f40->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c510 }
-0xaaab146e9f20->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c510 }
-0xaaab146e9f00->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c510 }
-0xaaab146e9ee0->Object::~Object { 0xaaab146e9ee0, 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xffffe729c510 }
-exception caught: printer
-end { }
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xffffe729c440->Object::Object { }
-0xffffe729c510->Object::Object { 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'a' (0xffffe729c510 'a')
-0xffffe729c430->Object::Object { 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xffffe729c430, 0xffffe729c510 }
-Shifting token 'a' (0xffffe729c430 'a')
-0xaaab146e9ee0->Object::Object { 0xffffe729c430 }
-0xffffe729c430->Object::~Object { 0xaaab146e9ee0, 0xffffe729c430 }
-Entering state 2
-Stack now 0 2
-0xffffe729c530->Object::Object { 0xaaab146e9ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab146e9ee0 'a')
--> $$ = nterm item (0xffffe729c530 'a')
-0xaaab146e9ee0->Object::~Object { 0xaaab146e9ee0, 0xffffe729c530 }
-0xaaab146e9ee0->Object::Object { 0xffffe729c530 }
-0xffffe729c530->Object::~Object { 0xaaab146e9ee0, 0xffffe729c530 }
-Entering state 11
-Stack now 0 11
-Reading a token
-0xffffe729c440->Object::Object { 0xaaab146e9ee0 }
-0xffffe729c510->Object::Object { 0xaaab146e9ee0, 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xaaab146e9ee0, 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'a' (0xffffe729c510 'a')
-0xffffe729c430->Object::Object { 0xaaab146e9ee0, 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xaaab146e9ee0, 0xffffe729c430, 0xffffe729c510 }
-Shifting token 'a' (0xffffe729c430 'a')
-0xaaab146e9f00->Object::Object { 0xaaab146e9ee0, 0xffffe729c430 }
-0xffffe729c430->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c430 }
-Entering state 2
-Stack now 0 11 2
-0xffffe729c530->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab146e9f00 'a')
--> $$ = nterm item (0xffffe729c530 'a')
-0xaaab146e9f00->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c530 }
-0xaaab146e9f00->Object::Object { 0xaaab146e9ee0, 0xffffe729c530 }
-0xffffe729c530->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c530 }
-Entering state 11
-Stack now 0 11 11
-Reading a token
-0xffffe729c440->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00 }
-0xffffe729c510->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'a' (0xffffe729c510 'a')
-0xffffe729c430->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c430, 0xffffe729c510 }
-Shifting token 'a' (0xffffe729c430 'a')
-0xaaab146e9f20->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c430 }
-0xffffe729c430->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c430 }
-Entering state 2
-Stack now 0 11 11 2
-0xffffe729c530->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab146e9f20 'a')
--> $$ = nterm item (0xffffe729c530 'a')
-0xaaab146e9f20->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c530 }
-0xaaab146e9f20->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c530 }
-0xffffe729c530->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c530 }
-Entering state 11
-Stack now 0 11 11 11
-Reading a token
-0xffffe729c440->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20 }
-0xffffe729c510->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'a' (0xffffe729c510 'a')
-0xffffe729c430->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c430, 0xffffe729c510 }
-Shifting token 'a' (0xffffe729c430 'a')
-0xaaab146e9f40->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c430 }
-0xffffe729c430->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c430 }
-Entering state 2
-Stack now 0 11 11 11 2
-0xffffe729c530->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab146e9f40 'a')
--> $$ = nterm item (0xffffe729c530 'a')
-0xaaab146e9f40->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c530 }
-0xaaab146e9f40->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c530 }
-0xffffe729c530->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c530 }
-Entering state 11
-Stack now 0 11 11 11 11
-Reading a token
-0xffffe729c440->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40 }
-0xffffe729c510->Object::Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c440 }
-0xffffe729c440->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c440, 0xffffe729c510 }
-Next token is token 'p' (0xffffe729c510 'p'Exception caught: cleaning lookahead and stack
-0xaaab146e9f40->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xaaab146e9f40, 0xffffe729c510 }
-0xaaab146e9f20->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xaaab146e9f20, 0xffffe729c510 }
-0xaaab146e9f00->Object::~Object { 0xaaab146e9ee0, 0xaaab146e9f00, 0xffffe729c510 }
-0xaaab146e9ee0->Object::~Object { 0xaaab146e9ee0, 0xffffe729c510 }
-0xffffe729c510->Object::~Object { 0xffffe729c510 }
-exception caught: printer
-end { }
-./c++.at:1362: grep '^exception caught: printer$' stderr
-stdout:
-exception caught: printer
-./c++.at:1362:  $PREPARSER ./input aaaae
-stderr:
-exception caught: syntax error
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaE
-stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaT
-stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaR
-stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./cxx-type.at:417:  $PREPARSER ./types test-input
@@ -260205,13 +259847,669 @@
 Cleanup: popping nterm prog (1.1-19.5: )
 706. cxx-type.at:415:  ok
 
+707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ...
+./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+stderr:
+stdout:
+./c++.at:1363:  $PREPARSER ./input aaaas
+stderr:
+exception caught: reduction
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaal
+stderr:
+exception caught: yylex
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input i
+stderr:
+exception caught: initial-action
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaap
+stderr:
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+types.y:77.8-37: warning: unset value: $$ [-Wother]
+types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
+types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
+./c++.at:1363:  $PREPARSER ./input --debug aaaap
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xfffff8aade40->Object::Object { }
+0xfffff8aadf30->Object::Object { 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'a' (0xfffff8aadf30 'a')
+0xfffff8aade50->Object::Object { 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xfffff8aade50, 0xfffff8aadf30 }
+Shifting token 'a' (0xfffff8aade50 'a')
+0xaaaae015dee0->Object::Object { 0xfffff8aade50 }
+0xfffff8aade50->Object::~Object { 0xaaaae015dee0, 0xfffff8aade50 }
+Entering state 1
+Stack now 0 1
+0xfffff8aadf50->Object::Object { 0xaaaae015dee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae015dee0 'a')
+-> $$ = nterm item (0xfffff8aadf50 'a')
+0xaaaae015dee0->Object::~Object { 0xaaaae015dee0, 0xfffff8aadf50 }
+0xaaaae015dee0->Object::Object { 0xfffff8aadf50 }
+0xfffff8aadf50->Object::~Object { 0xaaaae015dee0, 0xfffff8aadf50 }
+Entering state 10
+Stack now 0 10
+Reading a token
+0xfffff8aade40->Object::Object { 0xaaaae015dee0 }
+0xfffff8aadf30->Object::Object { 0xaaaae015dee0, 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xaaaae015dee0, 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'a' (0xfffff8aadf30 'a')
+0xfffff8aade50->Object::Object { 0xaaaae015dee0, 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xaaaae015dee0, 0xfffff8aade50, 0xfffff8aadf30 }
+Shifting token 'a' (0xfffff8aade50 'a')
+0xaaaae015df00->Object::Object { 0xaaaae015dee0, 0xfffff8aade50 }
+0xfffff8aade50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade50 }
+Entering state 1
+Stack now 0 10 1
+0xfffff8aadf50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae015df00 'a')
+-> $$ = nterm item (0xfffff8aadf50 'a')
+0xaaaae015df00->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf50 }
+0xaaaae015df00->Object::Object { 0xaaaae015dee0, 0xfffff8aadf50 }
+0xfffff8aadf50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf50 }
+Entering state 10
+Stack now 0 10 10
+Reading a token
+0xfffff8aade40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00 }
+0xfffff8aadf30->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'a' (0xfffff8aadf30 'a')
+0xfffff8aade50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade50, 0xfffff8aadf30 }
+Shifting token 'a' (0xfffff8aade50 'a')
+0xaaaae015df20->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade50 }
+0xfffff8aade50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade50 }
+Entering state 1
+Stack now 0 10 10 1
+0xfffff8aadf50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae015df20 'a')
+-> $$ = nterm item (0xfffff8aadf50 'a')
+0xaaaae015df20->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf50 }
+0xaaaae015df20->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf50 }
+0xfffff8aadf50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf50 }
+Entering state 10
+Stack now 0 10 10 10
+Reading a token
+0xfffff8aade40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20 }
+0xfffff8aadf30->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'a' (0xfffff8aadf30 'a')
+0xfffff8aade50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade50, 0xfffff8aadf30 }
+Shifting token 'a' (0xfffff8aade50 'a')
+0xaaaae015df40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade50 }
+0xfffff8aade50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aade50 }
+Entering state 1
+Stack now 0 10 10 10 1
+0xfffff8aadf50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae015df40 'a')
+-> $$ = nterm item (0xfffff8aadf50 'a')
+0xaaaae015df40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aadf50 }
+0xaaaae015df40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf50 }
+0xfffff8aadf50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aadf50 }
+Entering state 10
+Stack now 0 10 10 10 10
+Reading a token
+0xfffff8aade40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40 }
+0xfffff8aadf30->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'p' (0xfffff8aadf30 'p'Exception caught: cleaning lookahead and stack
+0xaaaae015df40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aadf30 }
+0xaaaae015df20->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf30 }
+0xaaaae015df00->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf30 }
+0xaaaae015dee0->Object::~Object { 0xaaaae015dee0, 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xfffff8aadf30 }
+exception caught: printer
+end { }
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xfffff8aade40->Object::Object { }
+0xfffff8aadf30->Object::Object { 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'a' (0xfffff8aadf30 'a')
+0xfffff8aade50->Object::Object { 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xfffff8aade50, 0xfffff8aadf30 }
+Shifting token 'a' (0xfffff8aade50 'a')
+0xaaaae015dee0->Object::Object { 0xfffff8aade50 }
+0xfffff8aade50->Object::~Object { 0xaaaae015dee0, 0xfffff8aade50 }
+Entering state 1
+Stack now 0 1
+0xfffff8aadf50->Object::Object { 0xaaaae015dee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae015dee0 'a')
+-> $$ = nterm item (0xfffff8aadf50 'a')
+0xaaaae015dee0->Object::~Object { 0xaaaae015dee0, 0xfffff8aadf50 }
+0xaaaae015dee0->Object::Object { 0xfffff8aadf50 }
+0xfffff8aadf50->Object::~Object { 0xaaaae015dee0, 0xfffff8aadf50 }
+Entering state 10
+Stack now 0 10
+Reading a token
+0xfffff8aade40->Object::Object { 0xaaaae015dee0 }
+0xfffff8aadf30->Object::Object { 0xaaaae015dee0, 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xaaaae015dee0, 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'a' (0xfffff8aadf30 'a')
+0xfffff8aade50->Object::Object { 0xaaaae015dee0, 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xaaaae015dee0, 0xfffff8aade50, 0xfffff8aadf30 }
+Shifting token 'a' (0xfffff8aade50 'a')
+0xaaaae015df00->Object::Object { 0xaaaae015dee0, 0xfffff8aade50 }
+0xfffff8aade50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade50 }
+Entering state 1
+Stack now 0 10 1
+0xfffff8aadf50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae015df00 'a')
+-> $$ = nterm item (0xfffff8aadf50 'a')
+0xaaaae015df00->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf50 }
+0xaaaae015df00->Object::Object { 0xaaaae015dee0, 0xfffff8aadf50 }
+0xfffff8aadf50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf50 }
+Entering state 10
+Stack now 0 10 10
+Reading a token
+0xfffff8aade40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00 }
+0xfffff8aadf30->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'a' (0xfffff8aadf30 'a')
+0xfffff8aade50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade50, 0xfffff8aadf30 }
+Shifting token 'a' (0xfffff8aade50 'a')
+0xaaaae015df20->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aade50 }
+0xfffff8aade50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade50 }
+Entering state 1
+Stack now 0 10 10 1
+0xfffff8aadf50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae015df20 'a')
+-> $$ = nterm item (0xfffff8aadf50 'a')
+0xaaaae015df20->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf50 }
+0xaaaae015df20->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf50 }
+0xfffff8aadf50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf50 }
+Entering state 10
+Stack now 0 10 10 10
+Reading a token
+0xfffff8aade40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20 }
+0xfffff8aadf30->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'a' (0xfffff8aadf30 'a')
+0xfffff8aade50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade50, 0xfffff8aadf30 }
+Shifting token 'a' (0xfffff8aade50 'a')
+0xaaaae015df40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aade50 }
+0xfffff8aade50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aade50 }
+Entering state 1
+Stack now 0 10 10 10 1
+0xfffff8aadf50->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae015df40 'a')
+-> $$ = nterm item (0xfffff8aadf50 'a')
+0xaaaae015df40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aadf50 }
+0xaaaae015df40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf50 }
+0xfffff8aadf50->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aadf50 }
+Entering state 10
+Stack now 0 10 10 10 10
+Reading a token
+0xfffff8aade40->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40 }
+0xfffff8aadf30->Object::Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aade40 }
+0xfffff8aade40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aade40, 0xfffff8aadf30 }
+Next token is token 'p' (0xfffff8aadf30 'p'Exception caught: cleaning lookahead and stack
+0xaaaae015df40->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xaaaae015df40, 0xfffff8aadf30 }
+0xaaaae015df20->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xaaaae015df20, 0xfffff8aadf30 }
+0xaaaae015df00->Object::~Object { 0xaaaae015dee0, 0xaaaae015df00, 0xfffff8aadf30 }
+0xaaaae015dee0->Object::~Object { 0xaaaae015dee0, 0xfffff8aadf30 }
+0xfffff8aadf30->Object::~Object { 0xfffff8aadf30 }
+exception caught: printer
+end { }
+./c++.at:1363: grep '^exception caught: printer$' stderr
+stdout:
+exception caught: printer
+./c++.at:1363:  $PREPARSER ./input aaaae
+stderr:
+exception caught: syntax error
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaE
+stderr:
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaT
+stderr:
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaR
+stderr:
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:858:  $PREPARSER ./input
+stderr:
+./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+======== Testing with C++ standard flags: ''
+./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./c++.at:1361:  $PREPARSER ./input aaaas
+stderr:
+exception caught: reduction
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaal
+stderr:
+exception caught: yylex
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input i
+stderr:
+exception caught: initial-action
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaap
+stderr:
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input --debug aaaap
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xaaaae2c36b40->Object::Object { }
+Next token is token 'a' (0xaaaae2c36b40 'a')
+Shifting token 'a' (0xaaaae2c36b40 'a')
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaae2c36b40 'a')
+-> $$ = nterm item (0xaaaae2c36b40 'a')
+Entering state 10
+Stack now 0 10
+Reading a token
+0xaaaae2c36b90->Object::Object { 0xaaaae2c36b40 }
+Next token is token 'a' (0xaaaae2c36b90 'a')
+Shifting token 'a' (0xaaaae2c36b90 'a')
+Entering state 1
+Stack now 0 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaae2c36b90 'a')
+-> $$ = nterm item (0xaaaae2c36b90 'a')
+Entering state 10
+Stack now 0 10 10
+Reading a token
+0xaaaae2c36be0->Object::Object { 0xaaaae2c36b40, 0xaaaae2c36b90 }
+Next token is token 'a' (0xaaaae2c36be0 'a')
+Shifting token 'a' (0xaaaae2c36be0 'a')
+Entering state 1
+Stack now 0 10 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaae2c36be0 'a')
+-> $$ = nterm item (0xaaaae2c36be0 'a')
+Entering state 10
+Stack now 0 10 10 10
+Reading a token
+0xaaaae2c36c30->Object::Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0 }
+Next token is token 'a' (0xaaaae2c36c30 'a')
+Shifting token 'a' (0xaaaae2c36c30 'a')
+Entering state 1
+Stack now 0 10 10 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaae2c36c30 'a')
+-> $$ = nterm item (0xaaaae2c36c30 'a')
+Entering state 10
+Stack now 0 10 10 10 10
+Reading a token
+0xaaaae2c36c80->Object::Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0, 0xaaaae2c36c30 }
+Next token is token 'p' (0xaaaae2c36c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaae2c36c80->Object::~Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0, 0xaaaae2c36c30, 0xaaaae2c36c80 }
+0xaaaae2c36c30->Object::~Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0, 0xaaaae2c36c30 }
+0xaaaae2c36be0->Object::~Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0 }
+0xaaaae2c36b90->Object::~Object { 0xaaaae2c36b40, 0xaaaae2c36b90 }
+0xaaaae2c36b40->Object::~Object { 0xaaaae2c36b40 }
+exception caught: printer
+end { }
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xaaaae2c36b40->Object::Object { }
+Next token is token 'a' (0xaaaae2c36b40 'a')
+Shifting token 'a' (0xaaaae2c36b40 'a')
+Entering state 1
+Stack now 0 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaae2c36b40 'a')
+-> $$ = nterm item (0xaaaae2c36b40 'a')
+Entering state 10
+Stack now 0 10
+Reading a token
+0xaaaae2c36b90->Object::Object { 0xaaaae2c36b40 }
+Next token is token 'a' (0xaaaae2c36b90 'a')
+Shifting token 'a' (0xaaaae2c36b90 'a')
+Entering state 1
+Stack now 0 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaae2c36b90 'a')
+-> $$ = nterm item (0xaaaae2c36b90 'a')
+Entering state 10
+Stack now 0 10 10
+Reading a token
+0xaaaae2c36be0->Object::Object { 0xaaaae2c36b40, 0xaaaae2c36b90 }
+Next token is token 'a' (0xaaaae2c36be0 'a')
+Shifting token 'a' (0xaaaae2c36be0 'a')
+Entering state 1
+Stack now 0 10 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaae2c36be0 'a')
+-> $$ = nterm item (0xaaaae2c36be0 'a')
+Entering state 10
+Stack now 0 10 10 10
+Reading a token
+0xaaaae2c36c30->Object::Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0 }
+Next token is token 'a' (0xaaaae2c36c30 'a')
+Shifting token 'a' (0xaaaae2c36c30 'a')
+Entering state 1
+Stack now 0 10 10 10 1
+Reducing stack by rule 4 (line 147):
+   $1 = token 'a' (0xaaaae2c36c30 'a')
+-> $$ = nterm item (0xaaaae2c36c30 'a')
+Entering state 10
+Stack now 0 10 10 10 10
+Reading a token
+0xaaaae2c36c80->Object::Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0, 0xaaaae2c36c30 }
+Next token is token 'p' (0xaaaae2c36c80 'p'Exception caught: cleaning lookahead and stack
+0xaaaae2c36c80->Object::~Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0, 0xaaaae2c36c30, 0xaaaae2c36c80 }
+0xaaaae2c36c30->Object::~Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0, 0xaaaae2c36c30 }
+0xaaaae2c36be0->Object::~Object { 0xaaaae2c36b40, 0xaaaae2c36b90, 0xaaaae2c36be0 }
+0xaaaae2c36b90->Object::~Object { 0xaaaae2c36b40, 0xaaaae2c36b90 }
+0xaaaae2c36b40->Object::~Object { 0xaaaae2c36b40 }
+exception caught: printer
+end { }
+./c++.at:1361: grep '^exception caught: printer$' stderr
+stdout:
+exception caught: printer
+./c++.at:1361:  $PREPARSER ./input aaaae
+stderr:
+exception caught: syntax error
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaaE
+stderr:
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaaT
+stderr:
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1361:  $PREPARSER ./input aaaaR
+stderr:
+./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+686. c++.at:1361:  ok
+
+stderr:
+stdout:
+./c++.at:1362:  $PREPARSER ./input aaaas
+stderr:
+exception caught: reduction
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaal
+stderr:
+exception caught: yylex
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input i
+stderr:
+exception caught: initial-action
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaap
+stderr:
 708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ...
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+./c++.at:1362:  $PREPARSER ./input --debug aaaap
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xffffe66a6950->Object::Object { }
+0xffffe66a6a20->Object::Object { 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'a' (0xffffe66a6a20 'a')
+0xffffe66a6940->Object::Object { 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xffffe66a6940, 0xffffe66a6a20 }
+Shifting token 'a' (0xffffe66a6940 'a')
+0xaaaaf4da4ee0->Object::Object { 0xffffe66a6940 }
+0xffffe66a6940->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6940 }
+Entering state 2
+Stack now 0 2
+0xffffe66a6a40->Object::Object { 0xaaaaf4da4ee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaf4da4ee0 'a')
+-> $$ = nterm item (0xffffe66a6a40 'a')
+0xaaaaf4da4ee0->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6a40 }
+0xaaaaf4da4ee0->Object::Object { 0xffffe66a6a40 }
+0xffffe66a6a40->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6a40 }
+Entering state 11
+Stack now 0 11
+Reading a token
+0xffffe66a6950->Object::Object { 0xaaaaf4da4ee0 }
+0xffffe66a6a20->Object::Object { 0xaaaaf4da4ee0, 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'a' (0xffffe66a6a20 'a')
+0xffffe66a6940->Object::Object { 0xaaaaf4da4ee0, 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6940, 0xffffe66a6a20 }
+Shifting token 'a' (0xffffe66a6940 'a')
+0xaaaaf4da4f00->Object::Object { 0xaaaaf4da4ee0, 0xffffe66a6940 }
+0xffffe66a6940->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6940 }
+Entering state 2
+Stack now 0 11 2
+0xffffe66a6a40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaf4da4f00 'a')
+-> $$ = nterm item (0xffffe66a6a40 'a')
+0xaaaaf4da4f00->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a40 }
+0xaaaaf4da4f00->Object::Object { 0xaaaaf4da4ee0, 0xffffe66a6a40 }
+0xffffe66a6a40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a40 }
+Entering state 11
+Stack now 0 11 11
+Reading a token
+0xffffe66a6950->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00 }
+0xffffe66a6a20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'a' (0xffffe66a6a20 'a')
+0xffffe66a6940->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6940, 0xffffe66a6a20 }
+Shifting token 'a' (0xffffe66a6940 'a')
+0xaaaaf4da4f20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6940 }
+0xffffe66a6940->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6940 }
+Entering state 2
+Stack now 0 11 11 2
+0xffffe66a6a40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaf4da4f20 'a')
+-> $$ = nterm item (0xffffe66a6a40 'a')
+0xaaaaf4da4f20->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a40 }
+0xaaaaf4da4f20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a40 }
+0xffffe66a6a40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a40 }
+Entering state 11
+Stack now 0 11 11 11
+Reading a token
+0xffffe66a6950->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20 }
+0xffffe66a6a20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'a' (0xffffe66a6a20 'a')
+0xffffe66a6940->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6940, 0xffffe66a6a20 }
+Shifting token 'a' (0xffffe66a6940 'a')
+0xaaaaf4da4f40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6940 }
+0xffffe66a6940->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6940 }
+Entering state 2
+Stack now 0 11 11 11 2
+0xffffe66a6a40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaf4da4f40 'a')
+-> $$ = nterm item (0xffffe66a6a40 'a')
+0xaaaaf4da4f40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6a40 }
+0xaaaaf4da4f40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a40 }
+0xffffe66a6a40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6a40 }
+Entering state 11
+Stack now 0 11 11 11 11
+Reading a token
+0xffffe66a6950->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40 }
+0xffffe66a6a20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'p' (0xffffe66a6a20 'p'Exception caught: cleaning lookahead and stack
+0xaaaaf4da4f40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6a20 }
+0xaaaaf4da4f20->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a20 }
+0xaaaaf4da4f00->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a20 }
+0xaaaaf4da4ee0->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xffffe66a6a20 }
+exception caught: printer
+end { }
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xffffe66a6950->Object::Object { }
+0xffffe66a6a20->Object::Object { 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'a' (0xffffe66a6a20 'a')
+0xffffe66a6940->Object::Object { 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xffffe66a6940, 0xffffe66a6a20 }
+Shifting token 'a' (0xffffe66a6940 'a')
+0xaaaaf4da4ee0->Object::Object { 0xffffe66a6940 }
+0xffffe66a6940->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6940 }
+Entering state 2
+Stack now 0 2
+0xffffe66a6a40->Object::Object { 0xaaaaf4da4ee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaf4da4ee0 'a')
+-> $$ = nterm item (0xffffe66a6a40 'a')
+0xaaaaf4da4ee0->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6a40 }
+0xaaaaf4da4ee0->Object::Object { 0xffffe66a6a40 }
+0xffffe66a6a40->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6a40 }
+Entering state 11
+Stack now 0 11
+Reading a token
+0xffffe66a6950->Object::Object { 0xaaaaf4da4ee0 }
+0xffffe66a6a20->Object::Object { 0xaaaaf4da4ee0, 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'a' (0xffffe66a6a20 'a')
+0xffffe66a6940->Object::Object { 0xaaaaf4da4ee0, 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6940, 0xffffe66a6a20 }
+Shifting token 'a' (0xffffe66a6940 'a')
+0xaaaaf4da4f00->Object::Object { 0xaaaaf4da4ee0, 0xffffe66a6940 }
+0xffffe66a6940->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6940 }
+Entering state 2
+Stack now 0 11 2
+0xffffe66a6a40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaf4da4f00 'a')
+-> $$ = nterm item (0xffffe66a6a40 'a')
+0xaaaaf4da4f00->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a40 }
+0xaaaaf4da4f00->Object::Object { 0xaaaaf4da4ee0, 0xffffe66a6a40 }
+0xffffe66a6a40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a40 }
+Entering state 11
+Stack now 0 11 11
+Reading a token
+0xffffe66a6950->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00 }
+0xffffe66a6a20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'a' (0xffffe66a6a20 'a')
+0xffffe66a6940->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6940, 0xffffe66a6a20 }
+Shifting token 'a' (0xffffe66a6940 'a')
+0xaaaaf4da4f20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6940 }
+0xffffe66a6940->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6940 }
+Entering state 2
+Stack now 0 11 11 2
+0xffffe66a6a40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaf4da4f20 'a')
+-> $$ = nterm item (0xffffe66a6a40 'a')
+0xaaaaf4da4f20->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a40 }
+0xaaaaf4da4f20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a40 }
+0xffffe66a6a40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a40 }
+Entering state 11
+Stack now 0 11 11 11
+Reading a token
+0xffffe66a6950->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20 }
+0xffffe66a6a20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'a' (0xffffe66a6a20 'a')
+0xffffe66a6940->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6940, 0xffffe66a6a20 }
+Shifting token 'a' (0xffffe66a6940 'a')
+0xaaaaf4da4f40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6940 }
+0xffffe66a6940->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6940 }
+Entering state 2
+Stack now 0 11 11 11 2
+0xffffe66a6a40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaaf4da4f40 'a')
+-> $$ = nterm item (0xffffe66a6a40 'a')
+0xaaaaf4da4f40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6a40 }
+0xaaaaf4da4f40->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a40 }
+0xffffe66a6a40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6a40 }
+Entering state 11
+Stack now 0 11 11 11 11
+Reading a token
+0xffffe66a6950->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40 }
+0xffffe66a6a20->Object::Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6950 }
+0xffffe66a6950->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6950, 0xffffe66a6a20 }
+Next token is token 'p' (0xffffe66a6a20 'p'Exception caught: cleaning lookahead and stack
+0xaaaaf4da4f40->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xaaaaf4da4f40, 0xffffe66a6a20 }
+0xaaaaf4da4f20->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xaaaaf4da4f20, 0xffffe66a6a20 }
+0xaaaaf4da4f00->Object::~Object { 0xaaaaf4da4ee0, 0xaaaaf4da4f00, 0xffffe66a6a20 }
+0xaaaaf4da4ee0->Object::~Object { 0xaaaaf4da4ee0, 0xffffe66a6a20 }
+0xffffe66a6a20->Object::~Object { 0xffffe66a6a20 }
+exception caught: printer
+end { }
+./c++.at:1362: grep '^exception caught: printer$' stderr
+stdout:
+exception caught: printer
+./c++.at:1362:  $PREPARSER ./input aaaae
+stderr:
+exception caught: syntax error
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaaE
+stderr:
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 types.y:87.8-37: warning: unset value: $$ [-Wother]
 types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
 types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
 ./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
+./c++.at:1362:  $PREPARSER ./input aaaaT
+stderr:
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaaR
+stderr:
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+689. c++.at:1371:  ok
+
+709. cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ...
+./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+stderr:
+types.y:77.8-37: warning: unset value: $$ [-Wother]
+types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
+types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
 stderr:
 stdout:
 ./c++.at:1555:  $PREPARSER ./test
@@ -260221,8 +260519,9 @@
 ./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
 stderr:
 stdout:
-======== Testing with C++ standard flags: ''
-./c++.at:1411: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./c++.at:1555: ./check
+./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o test.cc test.y
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 stderr:
 stdout:
 ./cxx-type.at:423:  $PREPARSER ./types test-input
@@ -261366,25 +261665,19 @@
    $1 = token '@' ()
 Cleanup: popping nterm prog ()
 707. cxx-type.at:420:  ok
-stderr:
-stdout:
-./c++.at:1555: ./check
 
-./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o test.cc test.y
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-709. cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ...
-./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ...
+./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
 stderr:
-types.y:77.8-37: warning: unset value: $$ [-Wother]
+types.y:87.8-37: warning: unset value: $$ [-Wother]
 types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
 types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
+./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
 stderr:
 stdout:
-./c++.at:858:  $PREPARSER ./input
+./c++.at:859:  $PREPARSER ./input
 stderr:
-./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
@@ -261395,6 +261688,7 @@
 ./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./cxx-type.at:429:  $PREPARSER ./types -p test-input
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Reducing stack 0 by rule 1 (line 71):
@@ -261961,8 +262255,16 @@
 Reducing stack 0 by rule 6 (line 87):
    $1 = token '@' (21.0: )
 Cleanup: popping nterm prog (1.1-19.5: )
+stdout:
+./c++.at:1066:  $PREPARSER ./input < in
 ./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+error: invalid expression
+caught error
+error: invalid character
+caught error
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 Starting parse
 Entering state 0
 Reducing stack 0 by rule 1 (line 71):
@@ -262530,252 +262832,31 @@
    $1 = token '@' (21.0: )
 Cleanup: popping nterm prog (1.1-19.5: )
 708. cxx-type.at:426:  ok
+./c++.at:1066:  $PREPARSER ./input < in
+stderr:
+error: invalid expression
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 
-710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ...
-./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+./c++.at:1066:  $PREPARSER ./input < in
 stderr:
-types.y:87.8-37: warning: unset value: $$ [-Wother]
+error: invalid character
+./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+684. c++.at:1066:  ok
+
+711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ...
+./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ...
+./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+stderr:
+types.y:77.8-37: warning: unset value: $$ [-Wother]
 types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
 types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
-stderr:
-stdout:
-./c++.at:1363:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaal
-stderr:
-exception caught: yylex
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input i
-stderr:
-exception caught: initial-action
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaap
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input --debug aaaap
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xfffff794d450->Object::Object { }
-0xfffff794d540->Object::Object { 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'a' (0xfffff794d540 'a')
-0xfffff794d460->Object::Object { 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xfffff794d460, 0xfffff794d540 }
-Shifting token 'a' (0xfffff794d460 'a')
-0xaaaae5cd6ee0->Object::Object { 0xfffff794d460 }
-0xfffff794d460->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d460 }
-Entering state 1
-Stack now 0 1
-0xfffff794d560->Object::Object { 0xaaaae5cd6ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae5cd6ee0 'a')
--> $$ = nterm item (0xfffff794d560 'a')
-0xaaaae5cd6ee0->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d560 }
-0xaaaae5cd6ee0->Object::Object { 0xfffff794d560 }
-0xfffff794d560->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d560 }
-Entering state 10
-Stack now 0 10
-Reading a token
-0xfffff794d450->Object::Object { 0xaaaae5cd6ee0 }
-0xfffff794d540->Object::Object { 0xaaaae5cd6ee0, 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'a' (0xfffff794d540 'a')
-0xfffff794d460->Object::Object { 0xaaaae5cd6ee0, 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d460, 0xfffff794d540 }
-Shifting token 'a' (0xfffff794d460 'a')
-0xaaaae5cd6f00->Object::Object { 0xaaaae5cd6ee0, 0xfffff794d460 }
-0xfffff794d460->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d460 }
-Entering state 1
-Stack now 0 10 1
-0xfffff794d560->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae5cd6f00 'a')
--> $$ = nterm item (0xfffff794d560 'a')
-0xaaaae5cd6f00->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d560 }
-0xaaaae5cd6f00->Object::Object { 0xaaaae5cd6ee0, 0xfffff794d560 }
-0xfffff794d560->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d560 }
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xfffff794d450->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00 }
-0xfffff794d540->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'a' (0xfffff794d540 'a')
-0xfffff794d460->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d460, 0xfffff794d540 }
-Shifting token 'a' (0xfffff794d460 'a')
-0xaaaae5cd6f20->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d460 }
-0xfffff794d460->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d460 }
-Entering state 1
-Stack now 0 10 10 1
-0xfffff794d560->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae5cd6f20 'a')
--> $$ = nterm item (0xfffff794d560 'a')
-0xaaaae5cd6f20->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d560 }
-0xaaaae5cd6f20->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d560 }
-0xfffff794d560->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d560 }
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xfffff794d450->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20 }
-0xfffff794d540->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'a' (0xfffff794d540 'a')
-0xfffff794d460->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d460, 0xfffff794d540 }
-Shifting token 'a' (0xfffff794d460 'a')
-0xaaaae5cd6f40->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d460 }
-0xfffff794d460->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d460 }
-Entering state 1
-Stack now 0 10 10 10 1
-0xfffff794d560->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae5cd6f40 'a')
--> $$ = nterm item (0xfffff794d560 'a')
-0xaaaae5cd6f40->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d560 }
-0xaaaae5cd6f40->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d560 }
-0xfffff794d560->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d560 }
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xfffff794d450->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40 }
-0xfffff794d540->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'p' (0xfffff794d540 'p'Exception caught: cleaning lookahead and stack
-0xaaaae5cd6f40->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d540 }
-0xaaaae5cd6f20->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d540 }
-0xaaaae5cd6f00->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d540 }
-0xaaaae5cd6ee0->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xfffff794d540 }
-exception caught: printer
-end { }
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xfffff794d450->Object::Object { }
-0xfffff794d540->Object::Object { 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'a' (0xfffff794d540 'a')
-0xfffff794d460->Object::Object { 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xfffff794d460, 0xfffff794d540 }
-Shifting token 'a' (0xfffff794d460 'a')
-0xaaaae5cd6ee0->Object::Object { 0xfffff794d460 }
-0xfffff794d460->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d460 }
-Entering state 1
-Stack now 0 1
-0xfffff794d560->Object::Object { 0xaaaae5cd6ee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae5cd6ee0 'a')
--> $$ = nterm item (0xfffff794d560 'a')
-0xaaaae5cd6ee0->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d560 }
-0xaaaae5cd6ee0->Object::Object { 0xfffff794d560 }
-0xfffff794d560->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d560 }
-Entering state 10
-Stack now 0 10
-Reading a token
-0xfffff794d450->Object::Object { 0xaaaae5cd6ee0 }
-0xfffff794d540->Object::Object { 0xaaaae5cd6ee0, 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'a' (0xfffff794d540 'a')
-0xfffff794d460->Object::Object { 0xaaaae5cd6ee0, 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d460, 0xfffff794d540 }
-Shifting token 'a' (0xfffff794d460 'a')
-0xaaaae5cd6f00->Object::Object { 0xaaaae5cd6ee0, 0xfffff794d460 }
-0xfffff794d460->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d460 }
-Entering state 1
-Stack now 0 10 1
-0xfffff794d560->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae5cd6f00 'a')
--> $$ = nterm item (0xfffff794d560 'a')
-0xaaaae5cd6f00->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d560 }
-0xaaaae5cd6f00->Object::Object { 0xaaaae5cd6ee0, 0xfffff794d560 }
-0xfffff794d560->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d560 }
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xfffff794d450->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00 }
-0xfffff794d540->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'a' (0xfffff794d540 'a')
-0xfffff794d460->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d460, 0xfffff794d540 }
-Shifting token 'a' (0xfffff794d460 'a')
-0xaaaae5cd6f20->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d460 }
-0xfffff794d460->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d460 }
-Entering state 1
-Stack now 0 10 10 1
-0xfffff794d560->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae5cd6f20 'a')
--> $$ = nterm item (0xfffff794d560 'a')
-0xaaaae5cd6f20->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d560 }
-0xaaaae5cd6f20->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d560 }
-0xfffff794d560->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d560 }
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xfffff794d450->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20 }
-0xfffff794d540->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'a' (0xfffff794d540 'a')
-0xfffff794d460->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d460, 0xfffff794d540 }
-Shifting token 'a' (0xfffff794d460 'a')
-0xaaaae5cd6f40->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d460 }
-0xfffff794d460->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d460 }
-Entering state 1
-Stack now 0 10 10 10 1
-0xfffff794d560->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae5cd6f40 'a')
--> $$ = nterm item (0xfffff794d560 'a')
-0xaaaae5cd6f40->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d560 }
-0xaaaae5cd6f40->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d560 }
-0xfffff794d560->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d560 }
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xfffff794d450->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40 }
-0xfffff794d540->Object::Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d450 }
-0xfffff794d450->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d450, 0xfffff794d540 }
-Next token is token 'p' (0xfffff794d540 'p'Exception caught: cleaning lookahead and stack
-0xaaaae5cd6f40->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xaaaae5cd6f40, 0xfffff794d540 }
-0xaaaae5cd6f20->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xaaaae5cd6f20, 0xfffff794d540 }
-0xaaaae5cd6f00->Object::~Object { 0xaaaae5cd6ee0, 0xaaaae5cd6f00, 0xfffff794d540 }
-0xaaaae5cd6ee0->Object::~Object { 0xaaaae5cd6ee0, 0xfffff794d540 }
-0xfffff794d540->Object::~Object { 0xfffff794d540 }
-exception caught: printer
-end { }
-./c++.at:1363: grep '^exception caught: printer$' stderr
-stdout:
-exception caught: printer
-./c++.at:1363:  $PREPARSER ./input aaaae
-stderr:
-exception caught: syntax error
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaE
-stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaT
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaR
+./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
 stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+types.y:87.8-37: warning: unset value: $$ [-Wother]
+types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
+types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
+./cxx-type.at:450: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
 stderr:
 stdout:
 ./cxx-type.at:435:  $PREPARSER ./types test-input
@@ -264004,24 +264085,13 @@
 Cleanup: popping nterm prog ()
 709. cxx-type.at:432:  ok
 
-711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ...
-./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+713. cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ...
+./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
 stderr:
 types.y:77.8-37: warning: unset value: $$ [-Wother]
 types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
 types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
-stderr:
-stdout:
-689. c++.at:1371:  ok
-
-712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ...
-./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
-stderr:
-types.y:87.8-37: warning: unset value: $$ [-Wother]
-types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
-types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./cxx-type.at:450: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
+./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
 stderr:
 stdout:
 ./c++.at:859:  $PREPARSER ./input
@@ -264031,38 +264101,12 @@
 ./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:1555:  $PREPARSER ./test
-stderr:
-./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
-stderr:
-stdout:
-./c++.at:1555: ./check
-./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o test.cc test.y
-stderr:
-stdout:
-./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
-./c++.at:1362:  $PREPARSER ./input aaaas
-stderr:
-stderr:
-exception caught: reduction
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaal
-stdout:
-stderr:
-exception caught: yylex
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./cxx-type.at:441:  $PREPARSER ./types test-input
 stderr:
 17.5: syntax error
 ./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input i
 ./cxx-type.at:441:  $PREPARSER ./types -p test-input
 stderr:
-stderr:
-exception caught: initial-action
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reducing stack 0 by rule 1 (line 71):
@@ -264671,12 +264715,8 @@
 Reducing stack 0 by rule 6 (line 87):
    $1 = token '@' (21.0: )
 Cleanup: popping nterm prog (1.1-19.5: )
-./c++.at:1362:  $PREPARSER ./input aaaap
 ./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input --debug aaaap
 Starting parse
 Entering state 0
 Reducing stack 0 by rule 1 (line 71):
@@ -265286,851 +265326,872 @@
    $1 = token '@' (21.0: )
 Cleanup: popping nterm prog (1.1-19.5: )
 710. cxx-type.at:438:  ok
+
+714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ...
+./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o glr-regr1.c glr-regr1.y
+./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS
+stderr:
+stdout:
+./c++.at:1363:  $PREPARSER ./input aaaas
+stderr:
+exception caught: reduction
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaal
+stderr:
+exception caught: yylex
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input i
+stderr:
+exception caught: initial-action
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaap
+stderr:
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input --debug aaaap
 stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd87d80c0->Object::Object { }
-0xffffd87d8190->Object::Object { 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'a' (0xffffd87d8190 'a')
-0xffffd87d80b0->Object::Object { 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xffffd87d80b0, 0xffffd87d8190 }
-Shifting token 'a' (0xffffd87d80b0 'a')
-0xaaab07b9dee0->Object::Object { 0xffffd87d80b0 }
-0xffffd87d80b0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d80b0 }
-Entering state 2
-Stack now 0 2
-0xffffd87d81b0->Object::Object { 0xaaab07b9dee0 }
+0xfffff5700ed0->Object::Object { }
+0xfffff5700fc0->Object::Object { 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'a' (0xfffff5700fc0 'a')
+0xfffff5700ee0->Object::Object { 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xfffff5700ee0, 0xfffff5700fc0 }
+Shifting token 'a' (0xfffff5700ee0 'a')
+0xaaaabd21bee0->Object::Object { 0xfffff5700ee0 }
+0xfffff5700ee0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700ee0 }
+Entering state 1
+Stack now 0 1
+0xfffff5700fe0->Object::Object { 0xaaaabd21bee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab07b9dee0 'a')
--> $$ = nterm item (0xffffd87d81b0 'a')
-0xaaab07b9dee0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d81b0 }
-0xaaab07b9dee0->Object::Object { 0xffffd87d81b0 }
-0xffffd87d81b0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d81b0 }
-Entering state 11
-Stack now 0 11
+   $1 = token 'a' (0xaaaabd21bee0 'a')
+-> $$ = nterm item (0xfffff5700fe0 'a')
+0xaaaabd21bee0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700fe0 }
+0xaaaabd21bee0->Object::Object { 0xfffff5700fe0 }
+0xfffff5700fe0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700fe0 }
+Entering state 10
+Stack now 0 10
 Reading a token
-0xffffd87d80c0->Object::Object { 0xaaab07b9dee0 }
-0xffffd87d8190->Object::Object { 0xaaab07b9dee0, 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'a' (0xffffd87d8190 'a')
-0xffffd87d80b0->Object::Object { 0xaaab07b9dee0, 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xaaab07b9dee0, 0xffffd87d80b0, 0xffffd87d8190 }
-Shifting token 'a' (0xffffd87d80b0 'a')
-0xaaab07b9df00->Object::Object { 0xaaab07b9dee0, 0xffffd87d80b0 }
-0xffffd87d80b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80b0 }
-Entering state 2
-Stack now 0 11 2
-0xffffd87d81b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00 }
+0xfffff5700ed0->Object::Object { 0xaaaabd21bee0 }
+0xfffff5700fc0->Object::Object { 0xaaaabd21bee0, 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'a' (0xfffff5700fc0 'a')
+0xfffff5700ee0->Object::Object { 0xaaaabd21bee0, 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700ee0, 0xfffff5700fc0 }
+Shifting token 'a' (0xfffff5700ee0 'a')
+0xaaaabd21bf00->Object::Object { 0xaaaabd21bee0, 0xfffff5700ee0 }
+0xfffff5700ee0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ee0 }
+Entering state 1
+Stack now 0 10 1
+0xfffff5700fe0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab07b9df00 'a')
--> $$ = nterm item (0xffffd87d81b0 'a')
-0xaaab07b9df00->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d81b0 }
-0xaaab07b9df00->Object::Object { 0xaaab07b9dee0, 0xffffd87d81b0 }
-0xffffd87d81b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d81b0 }
-Entering state 11
-Stack now 0 11 11
+   $1 = token 'a' (0xaaaabd21bf00 'a')
+-> $$ = nterm item (0xfffff5700fe0 'a')
+0xaaaabd21bf00->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fe0 }
+0xaaaabd21bf00->Object::Object { 0xaaaabd21bee0, 0xfffff5700fe0 }
+0xfffff5700fe0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fe0 }
+Entering state 10
+Stack now 0 10 10
 Reading a token
-0xffffd87d80c0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00 }
-0xffffd87d8190->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'a' (0xffffd87d8190 'a')
-0xffffd87d80b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80b0, 0xffffd87d8190 }
-Shifting token 'a' (0xffffd87d80b0 'a')
-0xaaab07b9df20->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80b0 }
-0xffffd87d80b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80b0 }
-Entering state 2
-Stack now 0 11 11 2
-0xffffd87d81b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20 }
+0xfffff5700ed0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00 }
+0xfffff5700fc0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'a' (0xfffff5700fc0 'a')
+0xfffff5700ee0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ee0, 0xfffff5700fc0 }
+Shifting token 'a' (0xfffff5700ee0 'a')
+0xaaaabd21bf20->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ee0 }
+0xfffff5700ee0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ee0 }
+Entering state 1
+Stack now 0 10 10 1
+0xfffff5700fe0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab07b9df20 'a')
--> $$ = nterm item (0xffffd87d81b0 'a')
-0xaaab07b9df20->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d81b0 }
-0xaaab07b9df20->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d81b0 }
-0xffffd87d81b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d81b0 }
-Entering state 11
-Stack now 0 11 11 11
+   $1 = token 'a' (0xaaaabd21bf20 'a')
+-> $$ = nterm item (0xfffff5700fe0 'a')
+0xaaaabd21bf20->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fe0 }
+0xaaaabd21bf20->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fe0 }
+0xfffff5700fe0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fe0 }
+Entering state 10
+Stack now 0 10 10 10
 Reading a token
-0xffffd87d80c0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20 }
-0xffffd87d8190->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'a' (0xffffd87d8190 'a')
-0xffffd87d80b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80b0, 0xffffd87d8190 }
-Shifting token 'a' (0xffffd87d80b0 'a')
-0xaaab07b9df40->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80b0 }
-0xffffd87d80b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d80b0 }
-Entering state 2
-Stack now 0 11 11 11 2
-0xffffd87d81b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40 }
+0xfffff5700ed0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20 }
+0xfffff5700fc0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'a' (0xfffff5700fc0 'a')
+0xfffff5700ee0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ee0, 0xfffff5700fc0 }
+Shifting token 'a' (0xfffff5700ee0 'a')
+0xaaaabd21bf40->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ee0 }
+0xfffff5700ee0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700ee0 }
+Entering state 1
+Stack now 0 10 10 10 1
+0xfffff5700fe0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab07b9df40 'a')
--> $$ = nterm item (0xffffd87d81b0 'a')
-0xaaab07b9df40->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d81b0 }
-0xaaab07b9df40->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d81b0 }
-0xffffd87d81b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d81b0 }
-Entering state 11
-Stack now 0 11 11 11 11
+   $1 = token 'a' (0xaaaabd21bf40 'a')
+-> $$ = nterm item (0xfffff5700fe0 'a')
+0xaaaabd21bf40->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700fe0 }
+0xaaaabd21bf40->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fe0 }
+0xfffff5700fe0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700fe0 }
+Entering state 10
+Stack now 0 10 10 10 10
 Reading a token
-0xffffd87d80c0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40 }
-0xffffd87d8190->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'p' (0xffffd87d8190 'p'Exception caught: cleaning lookahead and stack
-0xaaab07b9df40->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d8190 }
-0xaaab07b9df20->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d8190 }
-0xaaab07b9df00->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d8190 }
-0xaaab07b9dee0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xffffd87d8190 }
+0xfffff5700ed0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40 }
+0xfffff5700fc0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'p' (0xfffff5700fc0 'p'Exception caught: cleaning lookahead and stack
+0xaaaabd21bf40->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700fc0 }
+0xaaaabd21bf20->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fc0 }
+0xaaaabd21bf00->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fc0 }
+0xaaaabd21bee0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xfffff5700fc0 }
 exception caught: printer
 end { }
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
-
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd87d80c0->Object::Object { }
-0xffffd87d8190->Object::Object { 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'a' (0xffffd87d8190 'a')
-0xffffd87d80b0->Object::Object { 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xffffd87d80b0, 0xffffd87d8190 }
-Shifting token 'a' (0xffffd87d80b0 'a')
-0xaaab07b9dee0->Object::Object { 0xffffd87d80b0 }
-0xffffd87d80b0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d80b0 }
-Entering state 2
-Stack now 0 2
-0xffffd87d81b0->Object::Object { 0xaaab07b9dee0 }
+0xfffff5700ed0->Object::Object { }
+0xfffff5700fc0->Object::Object { 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'a' (0xfffff5700fc0 'a')
+0xfffff5700ee0->Object::Object { 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xfffff5700ee0, 0xfffff5700fc0 }
+Shifting token 'a' (0xfffff5700ee0 'a')
+0xaaaabd21bee0->Object::Object { 0xfffff5700ee0 }
+0xfffff5700ee0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700ee0 }
+Entering state 1
+Stack now 0 1
+0xfffff5700fe0->Object::Object { 0xaaaabd21bee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab07b9dee0 'a')
--> $$ = nterm item (0xffffd87d81b0 'a')
-0xaaab07b9dee0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d81b0 }
-0xaaab07b9dee0->Object::Object { 0xffffd87d81b0 }
-0xffffd87d81b0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d81b0 }
-Entering state 11
-Stack now 0 11
+   $1 = token 'a' (0xaaaabd21bee0 'a')
+-> $$ = nterm item (0xfffff5700fe0 'a')
+0xaaaabd21bee0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700fe0 }
+0xaaaabd21bee0->Object::Object { 0xfffff5700fe0 }
+0xfffff5700fe0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700fe0 }
+Entering state 10
+Stack now 0 10
 Reading a token
-0xffffd87d80c0->Object::Object { 0xaaab07b9dee0 }
-0xffffd87d8190->Object::Object { 0xaaab07b9dee0, 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'a' (0xffffd87d8190 'a')
-0xffffd87d80b0->Object::Object { 0xaaab07b9dee0, 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xaaab07b9dee0, 0xffffd87d80b0, 0xffffd87d8190 }
-Shifting token 'a' (0xffffd87d80b0 'a')
-0xaaab07b9df00->Object::Object { 0xaaab07b9dee0, 0xffffd87d80b0 }
-0xffffd87d80b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80b0 }
-Entering state 2
-Stack now 0 11 2
-0xffffd87d81b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00 }
+0xfffff5700ed0->Object::Object { 0xaaaabd21bee0 }
+0xfffff5700fc0->Object::Object { 0xaaaabd21bee0, 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'a' (0xfffff5700fc0 'a')
+0xfffff5700ee0->Object::Object { 0xaaaabd21bee0, 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700ee0, 0xfffff5700fc0 }
+Shifting token 'a' (0xfffff5700ee0 'a')
+0xaaaabd21bf00->Object::Object { 0xaaaabd21bee0, 0xfffff5700ee0 }
+0xfffff5700ee0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ee0 }
+Entering state 1
+Stack now 0 10 1
+0xfffff5700fe0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab07b9df00 'a')
--> $$ = nterm item (0xffffd87d81b0 'a')
-0xaaab07b9df00->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d81b0 }
-0xaaab07b9df00->Object::Object { 0xaaab07b9dee0, 0xffffd87d81b0 }
-0xffffd87d81b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d81b0 }
-Entering state 11
-Stack now 0 11 11
+   $1 = token 'a' (0xaaaabd21bf00 'a')
+-> $$ = nterm item (0xfffff5700fe0 'a')
+0xaaaabd21bf00->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fe0 }
+0xaaaabd21bf00->Object::Object { 0xaaaabd21bee0, 0xfffff5700fe0 }
+0xfffff5700fe0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fe0 }
+Entering state 10
+Stack now 0 10 10
 Reading a token
-0xffffd87d80c0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00 }
-0xffffd87d8190->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'a' (0xffffd87d8190 'a')
-0xffffd87d80b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80b0, 0xffffd87d8190 }
-Shifting token 'a' (0xffffd87d80b0 'a')
-0xaaab07b9df20->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d80b0 }
-0xffffd87d80b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80b0 }
-Entering state 2
-Stack now 0 11 11 2
-0xffffd87d81b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20 }
+0xfffff5700ed0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00 }
+0xfffff5700fc0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'a' (0xfffff5700fc0 'a')
+0xfffff5700ee0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ee0, 0xfffff5700fc0 }
+Shifting token 'a' (0xfffff5700ee0 'a')
+0xaaaabd21bf20->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700ee0 }
+0xfffff5700ee0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ee0 }
+Entering state 1
+Stack now 0 10 10 1
+0xfffff5700fe0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab07b9df20 'a')
--> $$ = nterm item (0xffffd87d81b0 'a')
-0xaaab07b9df20->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d81b0 }
-0xaaab07b9df20->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d81b0 }
-0xffffd87d81b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d81b0 }
-Entering state 11
-Stack now 0 11 11 11
+   $1 = token 'a' (0xaaaabd21bf20 'a')
+-> $$ = nterm item (0xfffff5700fe0 'a')
+0xaaaabd21bf20->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fe0 }
+0xaaaabd21bf20->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fe0 }
+0xfffff5700fe0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fe0 }
+Entering state 10
+Stack now 0 10 10 10
 Reading a token
-0xffffd87d80c0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20 }
-0xffffd87d8190->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'a' (0xffffd87d8190 'a')
-0xffffd87d80b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80b0, 0xffffd87d8190 }
-Shifting token 'a' (0xffffd87d80b0 'a')
-0xaaab07b9df40->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d80b0 }
-0xffffd87d80b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d80b0 }
-Entering state 2
-Stack now 0 11 11 11 2
-0xffffd87d81b0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40 }
+0xfffff5700ed0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20 }
+0xfffff5700fc0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'a' (0xfffff5700fc0 'a')
+0xfffff5700ee0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ee0, 0xfffff5700fc0 }
+Shifting token 'a' (0xfffff5700ee0 'a')
+0xaaaabd21bf40->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700ee0 }
+0xfffff5700ee0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700ee0 }
+Entering state 1
+Stack now 0 10 10 10 1
+0xfffff5700fe0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab07b9df40 'a')
--> $$ = nterm item (0xffffd87d81b0 'a')
-0xaaab07b9df40->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d81b0 }
-0xaaab07b9df40->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d81b0 }
-0xffffd87d81b0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d81b0 }
-Entering state 11
-Stack now 0 11 11 11 11
+   $1 = token 'a' (0xaaaabd21bf40 'a')
+-> $$ = nterm item (0xfffff5700fe0 'a')
+0xaaaabd21bf40->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700fe0 }
+0xaaaabd21bf40->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fe0 }
+0xfffff5700fe0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700fe0 }
+Entering state 10
+Stack now 0 10 10 10 10
 Reading a token
-0xffffd87d80c0->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40 }
-0xffffd87d8190->Object::Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d80c0 }
-0xffffd87d80c0->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d80c0, 0xffffd87d8190 }
-Next token is token 'p' (0xffffd87d8190 'p'Exception caught: cleaning lookahead and stack
-0xaaab07b9df40->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xaaab07b9df40, 0xffffd87d8190 }
-0xaaab07b9df20->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xaaab07b9df20, 0xffffd87d8190 }
-0xaaab07b9df00->Object::~Object { 0xaaab07b9dee0, 0xaaab07b9df00, 0xffffd87d8190 }
-0xaaab07b9dee0->Object::~Object { 0xaaab07b9dee0, 0xffffd87d8190 }
-0xffffd87d8190->Object::~Object { 0xffffd87d8190 }
+0xfffff5700ed0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40 }
+0xfffff5700fc0->Object::Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700ed0 }
+0xfffff5700ed0->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700ed0, 0xfffff5700fc0 }
+Next token is token 'p' (0xfffff5700fc0 'p'Exception caught: cleaning lookahead and stack
+0xaaaabd21bf40->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xaaaabd21bf40, 0xfffff5700fc0 }
+0xaaaabd21bf20->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xaaaabd21bf20, 0xfffff5700fc0 }
+0xaaaabd21bf00->Object::~Object { 0xaaaabd21bee0, 0xaaaabd21bf00, 0xfffff5700fc0 }
+0xaaaabd21bee0->Object::~Object { 0xaaaabd21bee0, 0xfffff5700fc0 }
+0xfffff5700fc0->Object::~Object { 0xfffff5700fc0 }
 exception caught: printer
 end { }
-./c++.at:1362: grep '^exception caught: printer$' stderr
+./c++.at:1363: grep '^exception caught: printer$' stderr
 stdout:
 exception caught: printer
-./c++.at:1362:  $PREPARSER ./input aaaae
+./c++.at:1363:  $PREPARSER ./input aaaae
 stderr:
 exception caught: syntax error
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaE
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaE
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaT
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaT
 stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1362:  $PREPARSER ./input aaaaR
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363:  $PREPARSER ./input aaaaR
 stderr:
-./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-713. cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ...
-./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o types.c types.y
+./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
-types.y:77.8-37: warning: unset value: $$ [-Wother]
-types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr]
-types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples
-./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o types types.c $LIBS
+stdout:
+stderr:
+./cxx-type.at:452:  $PREPARSER ./types test-input
 stderr:
+17.5: syntax error
 stdout:
 ./cxx-type.at:447:  $PREPARSER ./types test-input
+./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 syntax error
+./cxx-type.at:452:  $PREPARSER ./types -p test-input
 ./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./cxx-type.at:447:  $PREPARSER ./types -p test-input
 stderr:
 Starting parse
 Entering state 0
-Reducing stack 0 by rule 1 (line 64):
--> $$ = nterm prog ()
+Reducing stack 0 by rule 1 (line 71):
+-> $$ = nterm prog (1.1: )
 Entering state 1
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (3.0: )
+Shifting token ID (3.0: )
 Entering state 5
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (3.0: )
+-> $$ = nterm expr (3.0: )
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
+Next token is token '+' (3.2: )
+Shifting token '+' (3.2: )
 Entering state 15
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (3.4: )
+Shifting token ID (3.4: )
 Entering state 5
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (3.4: )
+-> $$ = nterm expr (3.4: )
 Entering state 25
-Reducing stack 0 by rule 9 (line 83):
-   $1 = nterm expr ()
-   $2 = token '+' ()
-   $3 = nterm expr ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 9 (line 93):
+   $1 = nterm expr (3.0: )
+   $2 = token '+' (3.2: )
+   $3 = nterm expr (3.4: )
+-> $$ = nterm expr (3.0-4: )
 Entering state 8
 Reading a token
-Next token is token ';' ()
-Shifting token ';' ()
+Next token is token ';' (3.5: )
+Shifting token ';' (3.5: )
 Entering state 16
-Reducing stack 0 by rule 3 (line 74):
-   $1 = nterm expr ()
-   $2 = token ';' ()
--> $$ = nterm stmt ()
+Reducing stack 0 by rule 3 (line 84):
+   $1 = nterm expr (3.0-4: )
+   $2 = token ';' (3.5: )
+-> $$ = nterm stmt (3.0-5: )
 Entering state 7
-Reducing stack 0 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack 0 by rule 2 (line 72):
+   $1 = nterm prog (1.1: )
+   $2 = nterm stmt (3.0-5: )
+-> $$ = nterm prog (1.1-3.5: )
 Entering state 1
 Reading a token
-Next token is token TYPENAME ()
-Shifting token TYPENAME ()
+Next token is token TYPENAME (5.0: )
+Shifting token TYPENAME (5.0: )
 Entering state 4
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (5.2: )
+Shifting token ID (5.2: )
 Entering state 11
-Reducing stack 0 by rule 13 (line 94):
-   $1 = token ID ()
--> $$ = nterm declarator ()
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token ID (5.2: )
+-> $$ = nterm declarator (5.2: )
 Entering state 13
 Reading a token
-Next token is token ';' ()
-Shifting token ';' ()
+Next token is token ';' (5.3: )
+Shifting token ';' (5.3: )
 Entering state 23
-Reducing stack 0 by rule 11 (line 87):
-   $1 = token TYPENAME ()
-   $2 = nterm declarator ()
-   $3 = token ';' ()
--> $$ = nterm decl ()
+Reducing stack 0 by rule 11 (line 97):
+   $1 = token TYPENAME (5.0: )
+   $2 = nterm declarator (5.2: )
+   $3 = token ';' (5.3: )
+-> $$ = nterm decl (5.0-3: )
 Entering state 9
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm decl ()
--> $$ = nterm stmt ()
+Reducing stack 0 by rule 4 (line 85):
+   $1 = nterm decl (5.0-3: )
+-> $$ = nterm stmt (5.0-3: )
 Entering state 7
-Reducing stack 0 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack 0 by rule 2 (line 72):
+   $1 = nterm prog (1.1-3.5: )
+   $2 = nterm stmt (5.0-3: )
+-> $$ = nterm prog (1.1-5.3: )
 Entering state 1
 Reading a token
-Next token is token TYPENAME ()
-Shifting token TYPENAME ()
+Next token is token TYPENAME (7.0: )
+Shifting token TYPENAME (7.0: )
 Entering state 4
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (7.2: )
+Shifting token ID (7.2: )
 Entering state 11
-Reducing stack 0 by rule 13 (line 94):
-   $1 = token ID ()
--> $$ = nterm declarator ()
+Reducing stack 0 by rule 13 (line 104):
+   $1 = token ID (7.2: )
+-> $$ = nterm declarator (7.2: )
 Entering state 13
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
+Next token is token '=' (7.4: )
+Shifting token '=' (7.4: )
 Entering state 22
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (7.6: )
+Shifting token ID (7.6: )
 Entering state 5
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (7.6: )
+-> $$ = nterm expr (7.6: )
 Entering state 29
 Reading a token
-Next token is token ';' ()
-Shifting token ';' ()
+Next token is token ';' (7.7: )
+Shifting token ';' (7.7: )
 Entering state 30
-Reducing stack 0 by rule 12 (line 89):
-   $1 = token TYPENAME ()
-   $2 = nterm declarator ()
-   $3 = token '=' ()
-   $4 = nterm expr ()
-   $5 = token ';' ()
--> $$ = nterm decl ()
+Reducing stack 0 by rule 12 (line 99):
+   $1 = token TYPENAME (7.0: )
+   $2 = nterm declarator (7.2: )
+   $3 = token '=' (7.4: )
+   $4 = nterm expr (7.6: )
+   $5 = token ';' (7.7: )
+-> $$ = nterm decl (7.0-7: )
 Entering state 9
-Reducing stack 0 by rule 4 (line 75):
-   $1 = nterm decl ()
--> $$ = nterm stmt ()
+Reducing stack 0 by rule 4 (line 85):
+   $1 = nterm decl (7.0-7: )
+-> $$ = nterm stmt (7.0-7: )
 Entering state 7
-Reducing stack 0 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack 0 by rule 2 (line 72):
+   $1 = nterm prog (1.1-5.3: )
+   $2 = nterm stmt (7.0-7: )
+-> $$ = nterm prog (1.1-7.7: )
 Entering state 1
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (9.0: )
+Shifting token ID (9.0: )
 Entering state 5
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (9.0: )
+-> $$ = nterm expr (9.0: )
 Entering state 8
 Reading a token
-Next token is token '=' ()
-Shifting token '=' ()
+Next token is token '=' (9.2: )
+Shifting token '=' (9.2: )
 Entering state 14
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (9.4: )
+Shifting token ID (9.4: )
 Entering state 5
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (9.4: )
+-> $$ = nterm expr (9.4: )
 Entering state 24
 Reading a token
-Next token is token ';' ()
-Reducing stack 0 by rule 10 (line 84):
-   $1 = nterm expr ()
-   $2 = token '=' ()
-   $3 = nterm expr ()
--> $$ = nterm expr ()
+Next token is token ';' (9.5: )
+Reducing stack 0 by rule 10 (line 94):
+   $1 = nterm expr (9.0: )
+   $2 = token '=' (9.2: )
+   $3 = nterm expr (9.4: )
+-> $$ = nterm expr (9.0-4: )
 Entering state 8
-Next token is token ';' ()
-Shifting token ';' ()
+Next token is token ';' (9.5: )
+Shifting token ';' (9.5: )
 Entering state 16
-Reducing stack 0 by rule 3 (line 74):
-   $1 = nterm expr ()
-   $2 = token ';' ()
--> $$ = nterm stmt ()
+Reducing stack 0 by rule 3 (line 84):
+   $1 = nterm expr (9.0-4: )
+   $2 = token ';' (9.5: )
+-> $$ = nterm stmt (9.0-5: )
 Entering state 7
-Reducing stack 0 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack 0 by rule 2 (line 72):
+   $1 = nterm prog (1.1-7.7: )
+   $2 = nterm stmt (9.0-5: )
+-> $$ = nterm prog (1.1-9.5: )
 Entering state 1
 Reading a token
-Next token is token TYPENAME ()
-Shifting token TYPENAME ()
+Next token is token TYPENAME (11.0: )
+Shifting token TYPENAME (11.0: )
 Entering state 4
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (11.2: )
+Shifting token '(' (11.2: )
 Entering state 12
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (11.3: )
+Shifting token ID (11.3: )
 Entering state 18
 Reading a token
-Next token is token ')' ()
+Next token is token ')' (11.4: )
 Stack 0 Entering state 18
-Next token is token ')' ()
+Next token is token ')' (11.4: )
 Splitting off stack 1 from 0.
-Reduced stack 1 by rule 13 (line 94); action deferred.  Now in state 21.
+Reduced stack 1 by rule 13 (line 104); action deferred.  Now in state 21.
 Stack 1 Entering state 21
-Next token is token ')' ()
-Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 20.
+Next token is token ')' (11.4: )
+Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 20.
 Stack 0 Entering state 20
-Next token is token ')' ()
+Next token is token ')' (11.4: )
 Stack 1 Entering state 21
-Next token is token ')' ()
-On stack 0, shifting token ')' ()
+Next token is token ')' (11.4: )
+On stack 0, shifting token ')' (11.4: )
 Stack 0 now in state 27
-On stack 1, shifting token ')' ()
+On stack 1, shifting token ')' (11.4: )
 Stack 1 now in state 28
 Stack 0 Entering state 27
-Reduced stack 0 by rule 8 (line 81); action deferred.  Now in state 8.
+Reduced stack 0 by rule 8 (line 91); action deferred.  Now in state 8.
 Stack 0 Entering state 8
 Reading a token
-Next token is token '+' ()
+Next token is token '+' (11.6: )
 Stack 1 Entering state 28
-Reduced stack 1 by rule 14 (line 95); action deferred.  Now in state 13.
+Reduced stack 1 by rule 14 (line 105); action deferred.  Now in state 13.
 Stack 1 Entering state 13
-Next token is token '+' ()
+Next token is token '+' (11.6: )
 Stack 1 dies.
 Removing dead stacks.
-On stack 0, shifting token '+' ()
+On stack 0, shifting token '+' (11.6: )
 Stack 0 now in state 15
-Reducing stack -1 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 8 (line 81):
-   $1 = token TYPENAME ()
-   $2 = token '(' ()
-   $3 = nterm expr ()
-   $4 = token ')' ()
--> $$ = nterm expr ()
+Reducing stack -1 by rule 7 (line 90):
+   $1 = token ID (11.3: )
+-> $$ = nterm expr (11.3: )
+Reducing stack -1 by rule 8 (line 91):
+   $1 = token TYPENAME (11.0: )
+   $2 = token '(' (11.2: )
+   $3 = nterm expr (11.3: )
+   $4 = token ')' (11.4: )
+-> $$ = nterm expr (11.0-4: )
 Returning to deterministic operation.
 Entering state 15
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (11.8: )
+Shifting token ID (11.8: )
 Entering state 5
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (11.8: )
+-> $$ = nterm expr (11.8: )
 Entering state 25
-Reducing stack 0 by rule 9 (line 83):
-   $1 = nterm expr ()
-   $2 = token '+' ()
-   $3 = nterm expr ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 9 (line 93):
+   $1 = nterm expr (11.0-4: )
+   $2 = token '+' (11.6: )
+   $3 = nterm expr (11.8: )
+-> $$ = nterm expr (11.0-8: )
 Entering state 8
 Reading a token
-Next token is token ';' ()
-Shifting token ';' ()
+Next token is token ';' (11.9: )
+Shifting token ';' (11.9: )
 Entering state 16
-Reducing stack 0 by rule 3 (line 74):
-   $1 = nterm expr ()
-   $2 = token ';' ()
--> $$ = nterm stmt ()
+Reducing stack 0 by rule 3 (line 84):
+   $1 = nterm expr (11.0-8: )
+   $2 = token ';' (11.9: )
+-> $$ = nterm stmt (11.0-9: )
 Entering state 7
-Reducing stack 0 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack 0 by rule 2 (line 72):
+   $1 = nterm prog (1.1-9.5: )
+   $2 = nterm stmt (11.0-9: )
+-> $$ = nterm prog (1.1-11.9: )
 Entering state 1
 Reading a token
-Next token is token TYPENAME ()
-Shifting token TYPENAME ()
+Next token is token TYPENAME (13.0: )
+Shifting token TYPENAME (13.0: )
 Entering state 4
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (13.2: )
+Shifting token '(' (13.2: )
 Entering state 12
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (13.3: )
+Shifting token ID (13.3: )
 Entering state 18
 Reading a token
-Next token is token ')' ()
+Next token is token ')' (13.4: )
 Stack 0 Entering state 18
-Next token is token ')' ()
+Next token is token ')' (13.4: )
 Splitting off stack 1 from 0.
-Reduced stack 1 by rule 13 (line 94); action deferred.  Now in state 21.
+Reduced stack 1 by rule 13 (line 104); action deferred.  Now in state 21.
 Stack 1 Entering state 21
-Next token is token ')' ()
-Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 20.
+Next token is token ')' (13.4: )
+Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 20.
 Stack 0 Entering state 20
-Next token is token ')' ()
+Next token is token ')' (13.4: )
 Stack 1 Entering state 21
-Next token is token ')' ()
-On stack 0, shifting token ')' ()
+Next token is token ')' (13.4: )
+On stack 0, shifting token ')' (13.4: )
 Stack 0 now in state 27
-On stack 1, shifting token ')' ()
+On stack 1, shifting token ')' (13.4: )
 Stack 1 now in state 28
 Stack 0 Entering state 27
-Reduced stack 0 by rule 8 (line 81); action deferred.  Now in state 8.
+Reduced stack 0 by rule 8 (line 91); action deferred.  Now in state 8.
 Stack 0 Entering state 8
 Reading a token
-Next token is token ';' ()
+Next token is token ';' (13.5: )
 Stack 1 Entering state 28
-Reduced stack 1 by rule 14 (line 95); action deferred.  Now in state 13.
+Reduced stack 1 by rule 14 (line 105); action deferred.  Now in state 13.
 Stack 1 Entering state 13
-Next token is token ';' ()
-On stack 0, shifting token ';' ()
+Next token is token ';' (13.5: )
+On stack 0, shifting token ';' (13.5: )
 Stack 0 now in state 16
-On stack 1, shifting token ';' ()
+On stack 1, shifting token ';' (13.5: )
 Stack 1 now in state 23
 Stack 0 Entering state 16
-Reduced stack 0 by rule 3 (line 74); action deferred.  Now in state 7.
+Reduced stack 0 by rule 3 (line 84); action deferred.  Now in state 7.
 Stack 0 Entering state 7
-Reduced stack 0 by rule 2 (line 65); action deferred.  Now in state 1.
+Reduced stack 0 by rule 2 (line 72); action deferred.  Now in state 1.
 Stack 0 Entering state 1
 Reading a token
-Next token is token TYPENAME ()
+Next token is token TYPENAME (15.0: )
 Stack 1 Entering state 23
-Reduced stack 1 by rule 11 (line 87); action deferred.  Now in state 9.
+Reduced stack 1 by rule 11 (line 97); action deferred.  Now in state 9.
 Stack 1 Entering state 9
-Reduced stack 1 by rule 4 (line 75); action deferred.  Now in state 7.
+Reduced stack 1 by rule 4 (line 85); action deferred.  Now in state 7.
 Stack 1 Entering state 7
-Reduced stack 1 by rule 2 (line 65); action deferred.  Now in state 1.
+Reduced stack 1 by rule 2 (line 72); action deferred.  Now in state 1.
 Merging stack 1 into stack 0.
 Removing dead stacks.
-On stack 0, shifting token TYPENAME ()
+On stack 0, shifting token TYPENAME (15.0: )
 Stack 0 now in state 4
-Reducing stack -1 by rule 13 (line 94):
-   $1 = token ID ()
--> $$ = nterm declarator ()
-Reducing stack -1 by rule 14 (line 95):
-   $1 = token '(' ()
-   $2 = nterm declarator ()
-   $3 = token ')' ()
--> $$ = nterm declarator ()
-Reducing stack -1 by rule 11 (line 87):
-   $1 = token TYPENAME ()
-   $2 = nterm declarator ()
-   $3 = token ';' ()
--> $$ = nterm decl ()
-Reducing stack -1 by rule 4 (line 75):
-   $1 = nterm decl ()
--> $$ = nterm stmt ()
-Reducing stack -1 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 8 (line 81):
-   $1 = token TYPENAME ()
-   $2 = token '(' ()
-   $3 = nterm expr ()
-   $4 = token ')' ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 3 (line 74):
-   $1 = nterm expr ()
-   $2 = token ';' ()
--> $$ = nterm stmt ()
-Reducing stack -1 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack -1 by rule 13 (line 104):
+   $1 = token ID (13.3: )
+-> $$ = nterm declarator (13.3: )
+Reducing stack -1 by rule 14 (line 105):
+   $1 = token '(' (13.2: )
+   $2 = nterm declarator (13.3: )
+   $3 = token ')' (13.4: )
+-> $$ = nterm declarator (13.2-4: )
+Reducing stack -1 by rule 11 (line 97):
+   $1 = token TYPENAME (13.0: )
+   $2 = nterm declarator (13.2-4: )
+   $3 = token ';' (13.5: )
+-> $$ = nterm decl (13.0-5: )
+Reducing stack -1 by rule 4 (line 85):
+   $1 = nterm decl (13.0-5: )
+-> $$ = nterm stmt (13.0-5: )
+Reducing stack -1 by rule 7 (line 90):
+   $1 = token ID (13.3: )
+-> $$ = nterm expr (13.3: )
+Reducing stack -1 by rule 8 (line 91):
+   $1 = token TYPENAME (13.0: )
+   $2 = token '(' (13.2: )
+   $3 = nterm expr (13.3: )
+   $4 = token ')' (13.4: )
+-> $$ = nterm expr (13.0-4: )
+Reducing stack -1 by rule 3 (line 84):
+   $1 = nterm expr (13.0-4: )
+   $2 = token ';' (13.5: )
+-> $$ = nterm stmt (13.0-5: )
+Reducing stack -1 by rule 2 (line 72):
+   $1 = nterm prog (1.1-11.9: )
+   $2 = nterm stmt (13.0-5: )
+-> $$ = nterm prog (1.1-13.5: )
 Returning to deterministic operation.
 Entering state 4
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (15.2: )
+Shifting token '(' (15.2: )
 Entering state 12
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (15.3: )
+Shifting token ID (15.3: )
 Entering state 18
 Reading a token
-Next token is token ')' ()
+Next token is token ')' (15.4: )
 Stack 0 Entering state 18
-Next token is token ')' ()
+Next token is token ')' (15.4: )
 Splitting off stack 1 from 0.
-Reduced stack 1 by rule 13 (line 94); action deferred.  Now in state 21.
+Reduced stack 1 by rule 13 (line 104); action deferred.  Now in state 21.
 Stack 1 Entering state 21
-Next token is token ')' ()
-Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 20.
+Next token is token ')' (15.4: )
+Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 20.
 Stack 0 Entering state 20
-Next token is token ')' ()
+Next token is token ')' (15.4: )
 Stack 1 Entering state 21
-Next token is token ')' ()
-On stack 0, shifting token ')' ()
+Next token is token ')' (15.4: )
+On stack 0, shifting token ')' (15.4: )
 Stack 0 now in state 27
-On stack 1, shifting token ')' ()
+On stack 1, shifting token ')' (15.4: )
 Stack 1 now in state 28
 Stack 0 Entering state 27
-Reduced stack 0 by rule 8 (line 81); action deferred.  Now in state 8.
+Reduced stack 0 by rule 8 (line 91); action deferred.  Now in state 8.
 Stack 0 Entering state 8
 Reading a token
-Next token is token '=' ()
+Next token is token '=' (15.6: )
 Stack 1 Entering state 28
-Reduced stack 1 by rule 14 (line 95); action deferred.  Now in state 13.
+Reduced stack 1 by rule 14 (line 105); action deferred.  Now in state 13.
 Stack 1 Entering state 13
-Next token is token '=' ()
-On stack 0, shifting token '=' ()
+Next token is token '=' (15.6: )
+On stack 0, shifting token '=' (15.6: )
 Stack 0 now in state 14
-On stack 1, shifting token '=' ()
+On stack 1, shifting token '=' (15.6: )
 Stack 1 now in state 22
 Stack 0 Entering state 14
 Reading a token
-Next token is token ID ()
+Next token is token ID (15.8: )
 Stack 1 Entering state 22
-Next token is token ID ()
-On stack 0, shifting token ID ()
+Next token is token ID (15.8: )
+On stack 0, shifting token ID (15.8: )
 Stack 0 now in state 5
-On stack 1, shifting token ID ()
+On stack 1, shifting token ID (15.8: )
 Stack 1 now in state 5
 Stack 0 Entering state 5
-Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 24.
+Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 24.
 Stack 0 Entering state 24
 Reading a token
-Next token is token '+' ()
+Next token is token '+' (15.10: )
 Stack 1 Entering state 5
-Reduced stack 1 by rule 7 (line 80); action deferred.  Now in state 29.
+Reduced stack 1 by rule 7 (line 90); action deferred.  Now in state 29.
 Stack 1 Entering state 29
-Next token is token '+' ()
-On stack 0, shifting token '+' ()
+Next token is token '+' (15.10: )
+On stack 0, shifting token '+' (15.10: )
 Stack 0 now in state 15
-On stack 1, shifting token '+' ()
+On stack 1, shifting token '+' (15.10: )
 Stack 1 now in state 15
 Stack 0 Entering state 15
 Reading a token
-Next token is token ID ()
+Next token is token ID (15.12: )
 Stack 1 Entering state 15
-Next token is token ID ()
-On stack 0, shifting token ID ()
+Next token is token ID (15.12: )
+On stack 0, shifting token ID (15.12: )
 Stack 0 now in state 5
-On stack 1, shifting token ID ()
+On stack 1, shifting token ID (15.12: )
 Stack 1 now in state 5
 Stack 0 Entering state 5
-Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 25.
+Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 25.
 Stack 0 Entering state 25
-Reduced stack 0 by rule 9 (line 83); action deferred.  Now in state 24.
+Reduced stack 0 by rule 9 (line 93); action deferred.  Now in state 24.
 Stack 0 Entering state 24
 Reading a token
-Next token is token ';' ()
-Reduced stack 0 by rule 10 (line 84); action deferred.  Now in state 8.
+Next token is token ';' (15.13: )
+Reduced stack 0 by rule 10 (line 94); action deferred.  Now in state 8.
 Stack 0 Entering state 8
-Next token is token ';' ()
+Next token is token ';' (15.13: )
 Stack 1 Entering state 5
-Reduced stack 1 by rule 7 (line 80); action deferred.  Now in state 25.
+Reduced stack 1 by rule 7 (line 90); action deferred.  Now in state 25.
 Stack 1 Entering state 25
-Reduced stack 1 by rule 9 (line 83); action deferred.  Now in state 29.
+Reduced stack 1 by rule 9 (line 93); action deferred.  Now in state 29.
 Stack 1 Entering state 29
-Next token is token ';' ()
-On stack 0, shifting token ';' ()
+Next token is token ';' (15.13: )
+On stack 0, shifting token ';' (15.13: )
 Stack 0 now in state 16
-On stack 1, shifting token ';' ()
+On stack 1, shifting token ';' (15.13: )
 Stack 1 now in state 30
 Stack 0 Entering state 16
-Reduced stack 0 by rule 3 (line 74); action deferred.  Now in state 7.
+Reduced stack 0 by rule 3 (line 84); action deferred.  Now in state 7.
 Stack 0 Entering state 7
-Reduced stack 0 by rule 2 (line 65); action deferred.  Now in state 1.
+Reduced stack 0 by rule 2 (line 72); action deferred.  Now in state 1.
 Stack 0 Entering state 1
 Reading a token
-Next token is token TYPENAME ()
+Next token is token TYPENAME (17.0: )
 Stack 1 Entering state 30
-Reduced stack 1 by rule 12 (line 89); action deferred.  Now in state 9.
+Reduced stack 1 by rule 12 (line 99); action deferred.  Now in state 9.
 Stack 1 Entering state 9
-Reduced stack 1 by rule 4 (line 75); action deferred.  Now in state 7.
+Reduced stack 1 by rule 4 (line 85); action deferred.  Now in state 7.
 Stack 1 Entering state 7
-Reduced stack 1 by rule 2 (line 65); action deferred.  Now in state 1.
+Reduced stack 1 by rule 2 (line 72); action deferred.  Now in state 1.
 Merging stack 1 into stack 0.
 Removing dead stacks.
-On stack 0, shifting token TYPENAME ()
+On stack 0, shifting token TYPENAME (17.0: )
 Stack 0 now in state 4
-Reducing stack -1 by rule 13 (line 94):
-   $1 = token ID ()
--> $$ = nterm declarator ()
-Reducing stack -1 by rule 14 (line 95):
-   $1 = token '(' ()
-   $2 = nterm declarator ()
-   $3 = token ')' ()
--> $$ = nterm declarator ()
-Reducing stack -1 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 9 (line 83):
-   $1 = nterm expr ()
-   $2 = token '+' ()
-   $3 = nterm expr ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 12 (line 89):
-   $1 = token TYPENAME ()
-   $2 = nterm declarator ()
-   $3 = token '=' ()
-   $4 = nterm expr ()
-   $5 = token ';' ()
--> $$ = nterm decl ()
-Reducing stack -1 by rule 4 (line 75):
-   $1 = nterm decl ()
--> $$ = nterm stmt ()
-Reducing stack -1 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 8 (line 81):
-   $1 = token TYPENAME ()
-   $2 = token '(' ()
-   $3 = nterm expr ()
-   $4 = token ')' ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 9 (line 83):
-   $1 = nterm expr ()
-   $2 = token '+' ()
-   $3 = nterm expr ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 10 (line 84):
-   $1 = nterm expr ()
-   $2 = token '=' ()
-   $3 = nterm expr ()
--> $$ = nterm expr ()
-Reducing stack -1 by rule 3 (line 74):
-   $1 = nterm expr ()
-   $2 = token ';' ()
--> $$ = nterm stmt ()
-Reducing stack -1 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack -1 by rule 13 (line 104):
+   $1 = token ID (15.3: )
+-> $$ = nterm declarator (15.3: )
+Reducing stack -1 by rule 14 (line 105):
+   $1 = token '(' (15.2: )
+   $2 = nterm declarator (15.3: )
+   $3 = token ')' (15.4: )
+-> $$ = nterm declarator (15.2-4: )
+Reducing stack -1 by rule 7 (line 90):
+   $1 = token ID (15.8: )
+-> $$ = nterm expr (15.8: )
+Reducing stack -1 by rule 7 (line 90):
+   $1 = token ID (15.12: )
+-> $$ = nterm expr (15.12: )
+Reducing stack -1 by rule 9 (line 93):
+   $1 = nterm expr (15.8: )
+   $2 = token '+' (15.10: )
+   $3 = nterm expr (15.12: )
+-> $$ = nterm expr (15.8-12: )
+Reducing stack -1 by rule 12 (line 99):
+   $1 = token TYPENAME (15.0: )
+   $2 = nterm declarator (15.2-4: )
+   $3 = token '=' (15.6: )
+   $4 = nterm expr (15.8-12: )
+   $5 = token ';' (15.13: )
+-> $$ = nterm decl (15.0-13: )
+Reducing stack -1 by rule 4 (line 85):
+   $1 = nterm decl (15.0-13: )
+-> $$ = nterm stmt (15.0-13: )
+Reducing stack -1 by rule 7 (line 90):
+   $1 = token ID (15.3: )
+-> $$ = nterm expr (15.3: )
+Reducing stack -1 by rule 8 (line 91):
+   $1 = token TYPENAME (15.0: )
+   $2 = token '(' (15.2: )
+   $3 = nterm expr (15.3: )
+   $4 = token ')' (15.4: )
+-> $$ = nterm expr (15.0-4: )
+Reducing stack -1 by rule 7 (line 90):
+   $1 = token ID (15.8: )
+-> $$ = nterm expr (15.8: )
+Reducing stack -1 by rule 7 (line 90):
+   $1 = token ID (15.12: )
+-> $$ = nterm expr (15.12: )
+Reducing stack -1 by rule 9 (line 93):
+   $1 = nterm expr (15.8: )
+   $2 = token '+' (15.10: )
+   $3 = nterm expr (15.12: )
+-> $$ = nterm expr (15.8-12: )
+Reducing stack -1 by rule 10 (line 94):
+   $1 = nterm expr (15.0-4: )
+   $2 = token '=' (15.6: )
+   $3 = nterm expr (15.8-12: )
+-> $$ = nterm expr (15.0-12: )
+Reducing stack -1 by rule 3 (line 84):
+   $1 = nterm expr (15.0-12: )
+   $2 = token ';' (15.13: )
+-> $$ = nterm stmt (15.0-13: )
+Reducing stack -1 by rule 2 (line 72):
+   $1 = nterm prog (1.1-13.5: )
+   $2 = nterm stmt (15.0-13: )
+-> $$ = nterm prog (1.1-15.13: )
 Returning to deterministic operation.
 Entering state 4
 Reading a token
-Next token is token '(' ()
-Shifting token '(' ()
+Next token is token '(' (17.2: )
+Shifting token '(' (17.2: )
 Entering state 12
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (17.3: )
+Shifting token ID (17.3: )
 Entering state 18
 Reading a token
-Next token is token ID ()
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Next token is token ID (17.5: )
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (17.3: )
+-> $$ = nterm expr (17.3: )
 Entering state 20
-Next token is token ID ()
-syntax error
-Error: popping nterm expr ()
-Error: popping token '(' ()
-Error: popping token TYPENAME ()
-Shifting token error ()
+Next token is token ID (17.5: )
+17.5: syntax error
+Error: popping nterm expr (17.3: )
+Error: popping token '(' (17.2: )
+Error: popping token TYPENAME (17.0: )
+Shifting token error (17.0-5: )
 Entering state 3
-Next token is token ID ()
-Error: discarding token ID ()
+Next token is token ID (17.5: )
+Error: discarding token ID (17.5: )
 Reading a token
-Next token is token ')' ()
-Error: discarding token ')' ()
+Next token is token ')' (17.6: )
+Error: discarding token ')' (17.6: )
 Reading a token
-Next token is token '=' ()
-Error: discarding token '=' ()
+Next token is token '=' (17.8: )
+Error: discarding token '=' (17.8: )
 Reading a token
-Next token is token ID ()
-Error: discarding token ID ()
+Next token is token ID (17.10: )
+Error: discarding token ID (17.10: )
 Reading a token
-Next token is token '+' ()
-Error: discarding token '+' ()
+Next token is token '+' (17.12: )
+Error: discarding token '+' (17.12: )
 Reading a token
-Next token is token ID ()
-Error: discarding token ID ()
+Next token is token ID (17.14: )
+Error: discarding token ID (17.14: )
 Reading a token
-Next token is token ';' ()
+Next token is token ';' (17.15: )
 Entering state 3
-Next token is token ';' ()
-Shifting token ';' ()
+Next token is token ';' (17.15: )
+Shifting token ';' (17.15: )
 Entering state 10
-Reducing stack 0 by rule 5 (line 76):
-   $1 = token error ()
-   $2 = token ';' ()
--> $$ = nterm stmt ()
+Reducing stack 0 by rule 5 (line 86):
+   $1 = token error (17.0-14: )
+   $2 = token ';' (17.15: )
+-> $$ = nterm stmt (17.0-15: )
 Entering state 7
-Reducing stack 0 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack 0 by rule 2 (line 72):
+   $1 = nterm prog (1.1-15.13: )
+   $2 = nterm stmt (17.0-15: )
+-> $$ = nterm prog (1.1-17.15: )
 Entering state 1
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (19.0: )
+Shifting token ID (19.0: )
 Entering state 5
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (19.0: )
+-> $$ = nterm expr (19.0: )
 Entering state 8
 Reading a token
-Next token is token '+' ()
-Shifting token '+' ()
+Next token is token '+' (19.2: )
+Shifting token '+' (19.2: )
 Entering state 15
 Reading a token
-Next token is token ID ()
-Shifting token ID ()
+Next token is token ID (19.4: )
+Shifting token ID (19.4: )
 Entering state 5
-Reducing stack 0 by rule 7 (line 80):
-   $1 = token ID ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 7 (line 90):
+   $1 = token ID (19.4: )
+-> $$ = nterm expr (19.4: )
 Entering state 25
-Reducing stack 0 by rule 9 (line 83):
-   $1 = nterm expr ()
-   $2 = token '+' ()
-   $3 = nterm expr ()
--> $$ = nterm expr ()
+Reducing stack 0 by rule 9 (line 93):
+   $1 = nterm expr (19.0: )
+   $2 = token '+' (19.2: )
+   $3 = nterm expr (19.4: )
+-> $$ = nterm expr (19.0-4: )
 Entering state 8
 Reading a token
-Next token is token ';' ()
-Shifting token ';' ()
+Next token is token ';' (19.5: )
+Shifting token ';' (19.5: )
 Entering state 16
-Reducing stack 0 by rule 3 (line 74):
-   $1 = nterm expr ()
-   $2 = token ';' ()
--> $$ = nterm stmt ()
+Reducing stack 0 by rule 3 (line 84):
+   $1 = nterm expr (19.0-4: )
+   $2 = token ';' (19.5: )
+-> $$ = nterm stmt (19.0-5: )
 Entering state 7
-Reducing stack 0 by rule 2 (line 65):
-   $1 = nterm prog ()
-   $2 = nterm stmt ()
--> $$ = nterm prog ()
+Reducing stack 0 by rule 2 (line 72):
+   $1 = nterm prog (1.1-17.15: )
+   $2 = nterm stmt (19.0-5: )
+-> $$ = nterm prog (1.1-19.5: )
 Entering state 1
 Reading a token
-Next token is token '@' ()
-Shifting token '@' ()
+Next token is token '@' (21.0: )
+Shifting token '@' (21.0: )
 Entering state 6
-Reducing stack 0 by rule 6 (line 77):
-   $1 = token '@' ()
-Cleanup: popping nterm prog ()
-./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+Reducing stack 0 by rule 6 (line 87):
+   $1 = token '@' (21.0: )
+Cleanup: popping nterm prog (1.1-19.5: )
+./cxx-type.at:447:  $PREPARSER ./types -p test-input
+./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 Starting parse
 Entering state 0
@@ -266740,26 +266801,8 @@
 Reducing stack 0 by rule 6 (line 77):
    $1 = token '@' ()
 Cleanup: popping nterm prog ()
-711. cxx-type.at:444:  ok
-
-714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ...
-./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o glr-regr1.c glr-regr1.y
-./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS
-stderr:
-stdout:
-./c++.at:859:  $PREPARSER ./input
-stderr:
-./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./cxx-type.at:452:  $PREPARSER ./types test-input
-stderr:
-17.5: syntax error
-./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./cxx-type.at:452:  $PREPARSER ./types -p test-input
 stderr:
+./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 Starting parse
 Entering state 0
 Reducing stack 0 by rule 1 (line 71):
@@ -267368,873 +267411,632 @@
 Reducing stack 0 by rule 6 (line 87):
    $1 = token '@' (21.0: )
 Cleanup: popping nterm prog (1.1-19.5: )
-./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+712. cxx-type.at:449:  ok
 Starting parse
 Entering state 0
-Reducing stack 0 by rule 1 (line 71):
--> $$ = nterm prog (1.1: )
+Reducing stack 0 by rule 1 (line 64):
+-> $$ = nterm prog ()
 Entering state 1
 Reading a token
-Next token is token ID (3.0: )
-Shifting token ID (3.0: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 5
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (3.0: )
--> $$ = nterm expr (3.0: )
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 8
 Reading a token
-Next token is token '+' (3.2: )
-Shifting token '+' (3.2: )
+Next token is token '+' ()
+Shifting token '+' ()
 Entering state 15
 Reading a token
-Next token is token ID (3.4: )
-Shifting token ID (3.4: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 5
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (3.4: )
--> $$ = nterm expr (3.4: )
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 25
-Reducing stack 0 by rule 9 (line 93):
-   $1 = nterm expr (3.0: )
-   $2 = token '+' (3.2: )
-   $3 = nterm expr (3.4: )
--> $$ = nterm expr (3.0-4: )
+Reducing stack 0 by rule 9 (line 83):
+   $1 = nterm expr ()
+   $2 = token '+' ()
+   $3 = nterm expr ()
+-> $$ = nterm expr ()
 Entering state 8
 Reading a token
-Next token is token ';' (3.5: )
-Shifting token ';' (3.5: )
+Next token is token ';' ()
+Shifting token ';' ()
 Entering state 16
-Reducing stack 0 by rule 3 (line 84):
-   $1 = nterm expr (3.0-4: )
-   $2 = token ';' (3.5: )
--> $$ = nterm stmt (3.0-5: )
+Reducing stack 0 by rule 3 (line 74):
+   $1 = nterm expr ()
+   $2 = token ';' ()
+-> $$ = nterm stmt ()
 Entering state 7
-Reducing stack 0 by rule 2 (line 72):
-   $1 = nterm prog (1.1: )
-   $2 = nterm stmt (3.0-5: )
--> $$ = nterm prog (1.1-3.5: )
+Reducing stack 0 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Entering state 1
 Reading a token
-Next token is token TYPENAME (5.0: )
-Shifting token TYPENAME (5.0: )
+Next token is token TYPENAME ()
+Shifting token TYPENAME ()
 Entering state 4
 Reading a token
-Next token is token ID (5.2: )
-Shifting token ID (5.2: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 11
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token ID (5.2: )
--> $$ = nterm declarator (5.2: )
+Reducing stack 0 by rule 13 (line 94):
+   $1 = token ID ()
+-> $$ = nterm declarator ()
 Entering state 13
 Reading a token
-Next token is token ';' (5.3: )
-Shifting token ';' (5.3: )
+Next token is token ';' ()
+Shifting token ';' ()
 Entering state 23
-Reducing stack 0 by rule 11 (line 97):
-   $1 = token TYPENAME (5.0: )
-   $2 = nterm declarator (5.2: )
-   $3 = token ';' (5.3: )
--> $$ = nterm decl (5.0-3: )
+Reducing stack 0 by rule 11 (line 87):
+   $1 = token TYPENAME ()
+   $2 = nterm declarator ()
+   $3 = token ';' ()
+-> $$ = nterm decl ()
 Entering state 9
-Reducing stack 0 by rule 4 (line 85):
-   $1 = nterm decl (5.0-3: )
--> $$ = nterm stmt (5.0-3: )
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm decl ()
+-> $$ = nterm stmt ()
 Entering state 7
-Reducing stack 0 by rule 2 (line 72):
-   $1 = nterm prog (1.1-3.5: )
-   $2 = nterm stmt (5.0-3: )
--> $$ = nterm prog (1.1-5.3: )
+Reducing stack 0 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Entering state 1
 Reading a token
-Next token is token TYPENAME (7.0: )
-Shifting token TYPENAME (7.0: )
+Next token is token TYPENAME ()
+Shifting token TYPENAME ()
 Entering state 4
 Reading a token
-Next token is token ID (7.2: )
-Shifting token ID (7.2: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 11
-Reducing stack 0 by rule 13 (line 104):
-   $1 = token ID (7.2: )
--> $$ = nterm declarator (7.2: )
+Reducing stack 0 by rule 13 (line 94):
+   $1 = token ID ()
+-> $$ = nterm declarator ()
 Entering state 13
 Reading a token
-Next token is token '=' (7.4: )
-Shifting token '=' (7.4: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 22
 Reading a token
-Next token is token ID (7.6: )
-Shifting token ID (7.6: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 5
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (7.6: )
--> $$ = nterm expr (7.6: )
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 29
 Reading a token
-Next token is token ';' (7.7: )
-Shifting token ';' (7.7: )
+Next token is token ';' ()
+Shifting token ';' ()
 Entering state 30
-Reducing stack 0 by rule 12 (line 99):
-   $1 = token TYPENAME (7.0: )
-   $2 = nterm declarator (7.2: )
-   $3 = token '=' (7.4: )
-   $4 = nterm expr (7.6: )
-   $5 = token ';' (7.7: )
--> $$ = nterm decl (7.0-7: )
+Reducing stack 0 by rule 12 (line 89):
+   $1 = token TYPENAME ()
+   $2 = nterm declarator ()
+   $3 = token '=' ()
+   $4 = nterm expr ()
+   $5 = token ';' ()
+-> $$ = nterm decl ()
 Entering state 9
-Reducing stack 0 by rule 4 (line 85):
-   $1 = nterm decl (7.0-7: )
--> $$ = nterm stmt (7.0-7: )
+Reducing stack 0 by rule 4 (line 75):
+   $1 = nterm decl ()
+-> $$ = nterm stmt ()
 Entering state 7
-Reducing stack 0 by rule 2 (line 72):
-   $1 = nterm prog (1.1-5.3: )
-   $2 = nterm stmt (7.0-7: )
--> $$ = nterm prog (1.1-7.7: )
+Reducing stack 0 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Entering state 1
 Reading a token
-Next token is token ID (9.0: )
-Shifting token ID (9.0: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 5
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (9.0: )
--> $$ = nterm expr (9.0: )
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 8
 Reading a token
-Next token is token '=' (9.2: )
-Shifting token '=' (9.2: )
+Next token is token '=' ()
+Shifting token '=' ()
 Entering state 14
 Reading a token
-Next token is token ID (9.4: )
-Shifting token ID (9.4: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 5
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (9.4: )
--> $$ = nterm expr (9.4: )
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 24
 Reading a token
-Next token is token ';' (9.5: )
-Reducing stack 0 by rule 10 (line 94):
-   $1 = nterm expr (9.0: )
-   $2 = token '=' (9.2: )
-   $3 = nterm expr (9.4: )
--> $$ = nterm expr (9.0-4: )
+Next token is token ';' ()
+Reducing stack 0 by rule 10 (line 84):
+   $1 = nterm expr ()
+   $2 = token '=' ()
+   $3 = nterm expr ()
+-> $$ = nterm expr ()
 Entering state 8
-Next token is token ';' (9.5: )
-Shifting token ';' (9.5: )
+Next token is token ';' ()
+Shifting token ';' ()
 Entering state 16
-Reducing stack 0 by rule 3 (line 84):
-   $1 = nterm expr (9.0-4: )
-   $2 = token ';' (9.5: )
--> $$ = nterm stmt (9.0-5: )
+Reducing stack 0 by rule 3 (line 74):
+   $1 = nterm expr ()
+   $2 = token ';' ()
+-> $$ = nterm stmt ()
 Entering state 7
-Reducing stack 0 by rule 2 (line 72):
-   $1 = nterm prog (1.1-7.7: )
-   $2 = nterm stmt (9.0-5: )
--> $$ = nterm prog (1.1-9.5: )
+Reducing stack 0 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Entering state 1
 Reading a token
-Next token is token TYPENAME (11.0: )
-Shifting token TYPENAME (11.0: )
+Next token is token TYPENAME ()
+Shifting token TYPENAME ()
 Entering state 4
 Reading a token
-Next token is token '(' (11.2: )
-Shifting token '(' (11.2: )
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 12
 Reading a token
-Next token is token ID (11.3: )
-Shifting token ID (11.3: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 18
 Reading a token
-Next token is token ')' (11.4: )
+Next token is token ')' ()
 Stack 0 Entering state 18
-Next token is token ')' (11.4: )
+Next token is token ')' ()
 Splitting off stack 1 from 0.
-Reduced stack 1 by rule 13 (line 104); action deferred.  Now in state 21.
+Reduced stack 1 by rule 13 (line 94); action deferred.  Now in state 21.
 Stack 1 Entering state 21
-Next token is token ')' (11.4: )
-Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 20.
+Next token is token ')' ()
+Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 20.
 Stack 0 Entering state 20
-Next token is token ')' (11.4: )
+Next token is token ')' ()
 Stack 1 Entering state 21
-Next token is token ')' (11.4: )
-On stack 0, shifting token ')' (11.4: )
+Next token is token ')' ()
+On stack 0, shifting token ')' ()
 Stack 0 now in state 27
-On stack 1, shifting token ')' (11.4: )
+On stack 1, shifting token ')' ()
 Stack 1 now in state 28
 Stack 0 Entering state 27
-Reduced stack 0 by rule 8 (line 91); action deferred.  Now in state 8.
+Reduced stack 0 by rule 8 (line 81); action deferred.  Now in state 8.
 Stack 0 Entering state 8
 Reading a token
-Next token is token '+' (11.6: )
+Next token is token '+' ()
 Stack 1 Entering state 28
-Reduced stack 1 by rule 14 (line 105); action deferred.  Now in state 13.
+Reduced stack 1 by rule 14 (line 95); action deferred.  Now in state 13.
 Stack 1 Entering state 13
-Next token is token '+' (11.6: )
+Next token is token '+' ()
 Stack 1 dies.
 Removing dead stacks.
-On stack 0, shifting token '+' (11.6: )
+On stack 0, shifting token '+' ()
 Stack 0 now in state 15
-Reducing stack -1 by rule 7 (line 90):
-   $1 = token ID (11.3: )
--> $$ = nterm expr (11.3: )
-Reducing stack -1 by rule 8 (line 91):
-   $1 = token TYPENAME (11.0: )
-   $2 = token '(' (11.2: )
-   $3 = nterm expr (11.3: )
-   $4 = token ')' (11.4: )
--> $$ = nterm expr (11.0-4: )
+Reducing stack -1 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 8 (line 81):
+   $1 = token TYPENAME ()
+   $2 = token '(' ()
+   $3 = nterm expr ()
+   $4 = token ')' ()
+-> $$ = nterm expr ()
 Returning to deterministic operation.
 Entering state 15
 Reading a token
-Next token is token ID (11.8: )
-Shifting token ID (11.8: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 5
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (11.8: )
--> $$ = nterm expr (11.8: )
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 25
-Reducing stack 0 by rule 9 (line 93):
-   $1 = nterm expr (11.0-4: )
-   $2 = token '+' (11.6: )
-   $3 = nterm expr (11.8: )
--> $$ = nterm expr (11.0-8: )
+Reducing stack 0 by rule 9 (line 83):
+   $1 = nterm expr ()
+   $2 = token '+' ()
+   $3 = nterm expr ()
+-> $$ = nterm expr ()
 Entering state 8
 Reading a token
-Next token is token ';' (11.9: )
-Shifting token ';' (11.9: )
+Next token is token ';' ()
+Shifting token ';' ()
 Entering state 16
-Reducing stack 0 by rule 3 (line 84):
-   $1 = nterm expr (11.0-8: )
-   $2 = token ';' (11.9: )
--> $$ = nterm stmt (11.0-9: )
+Reducing stack 0 by rule 3 (line 74):
+   $1 = nterm expr ()
+   $2 = token ';' ()
+-> $$ = nterm stmt ()
 Entering state 7
-Reducing stack 0 by rule 2 (line 72):
-   $1 = nterm prog (1.1-9.5: )
-   $2 = nterm stmt (11.0-9: )
--> $$ = nterm prog (1.1-11.9: )
+Reducing stack 0 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Entering state 1
 Reading a token
-Next token is token TYPENAME (13.0: )
-Shifting token TYPENAME (13.0: )
+Next token is token TYPENAME ()
+Shifting token TYPENAME ()
 Entering state 4
 Reading a token
-Next token is token '(' (13.2: )
-Shifting token '(' (13.2: )
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 12
 Reading a token
-Next token is token ID (13.3: )
-Shifting token ID (13.3: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 18
 Reading a token
-Next token is token ')' (13.4: )
+Next token is token ')' ()
 Stack 0 Entering state 18
-Next token is token ')' (13.4: )
+Next token is token ')' ()
 Splitting off stack 1 from 0.
-Reduced stack 1 by rule 13 (line 104); action deferred.  Now in state 21.
+Reduced stack 1 by rule 13 (line 94); action deferred.  Now in state 21.
 Stack 1 Entering state 21
-Next token is token ')' (13.4: )
-Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 20.
+Next token is token ')' ()
+Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 20.
 Stack 0 Entering state 20
-Next token is token ')' (13.4: )
+Next token is token ')' ()
 Stack 1 Entering state 21
-Next token is token ')' (13.4: )
-On stack 0, shifting token ')' (13.4: )
+Next token is token ')' ()
+On stack 0, shifting token ')' ()
 Stack 0 now in state 27
-On stack 1, shifting token ')' (13.4: )
+On stack 1, shifting token ')' ()
 Stack 1 now in state 28
 Stack 0 Entering state 27
-Reduced stack 0 by rule 8 (line 91); action deferred.  Now in state 8.
+Reduced stack 0 by rule 8 (line 81); action deferred.  Now in state 8.
 Stack 0 Entering state 8
 Reading a token
-Next token is token ';' (13.5: )
+Next token is token ';' ()
 Stack 1 Entering state 28
-Reduced stack 1 by rule 14 (line 105); action deferred.  Now in state 13.
+Reduced stack 1 by rule 14 (line 95); action deferred.  Now in state 13.
 Stack 1 Entering state 13
-Next token is token ';' (13.5: )
-On stack 0, shifting token ';' (13.5: )
+Next token is token ';' ()
+On stack 0, shifting token ';' ()
 Stack 0 now in state 16
-On stack 1, shifting token ';' (13.5: )
+On stack 1, shifting token ';' ()
 Stack 1 now in state 23
 Stack 0 Entering state 16
-Reduced stack 0 by rule 3 (line 84); action deferred.  Now in state 7.
+Reduced stack 0 by rule 3 (line 74); action deferred.  Now in state 7.
 Stack 0 Entering state 7
-Reduced stack 0 by rule 2 (line 72); action deferred.  Now in state 1.
+Reduced stack 0 by rule 2 (line 65); action deferred.  Now in state 1.
 Stack 0 Entering state 1
 Reading a token
-Next token is token TYPENAME (15.0: )
+Next token is token TYPENAME ()
 Stack 1 Entering state 23
-Reduced stack 1 by rule 11 (line 97); action deferred.  Now in state 9.
+Reduced stack 1 by rule 11 (line 87); action deferred.  Now in state 9.
 Stack 1 Entering state 9
-Reduced stack 1 by rule 4 (line 85); action deferred.  Now in state 7.
+Reduced stack 1 by rule 4 (line 75); action deferred.  Now in state 7.
 Stack 1 Entering state 7
-Reduced stack 1 by rule 2 (line 72); action deferred.  Now in state 1.
+Reduced stack 1 by rule 2 (line 65); action deferred.  Now in state 1.
 Merging stack 1 into stack 0.
 Removing dead stacks.
-On stack 0, shifting token TYPENAME (15.0: )
+On stack 0, shifting token TYPENAME ()
 Stack 0 now in state 4
-Reducing stack -1 by rule 13 (line 104):
-   $1 = token ID (13.3: )
--> $$ = nterm declarator (13.3: )
-Reducing stack -1 by rule 14 (line 105):
-   $1 = token '(' (13.2: )
-   $2 = nterm declarator (13.3: )
-   $3 = token ')' (13.4: )
--> $$ = nterm declarator (13.2-4: )
-Reducing stack -1 by rule 11 (line 97):
-   $1 = token TYPENAME (13.0: )
-   $2 = nterm declarator (13.2-4: )
-   $3 = token ';' (13.5: )
--> $$ = nterm decl (13.0-5: )
-Reducing stack -1 by rule 4 (line 85):
-   $1 = nterm decl (13.0-5: )
--> $$ = nterm stmt (13.0-5: )
-Reducing stack -1 by rule 7 (line 90):
-   $1 = token ID (13.3: )
--> $$ = nterm expr (13.3: )
-Reducing stack -1 by rule 8 (line 91):
-   $1 = token TYPENAME (13.0: )
-   $2 = token '(' (13.2: )
-   $3 = nterm expr (13.3: )
-   $4 = token ')' (13.4: )
--> $$ = nterm expr (13.0-4: )
-Reducing stack -1 by rule 3 (line 84):
-   $1 = nterm expr (13.0-4: )
-   $2 = token ';' (13.5: )
--> $$ = nterm stmt (13.0-5: )
-Reducing stack -1 by rule 2 (line 72):
-   $1 = nterm prog (1.1-11.9: )
-   $2 = nterm stmt (13.0-5: )
--> $$ = nterm prog (1.1-13.5: )
+Reducing stack -1 by rule 13 (line 94):
+   $1 = token ID ()
+-> $$ = nterm declarator ()
+Reducing stack -1 by rule 14 (line 95):
+   $1 = token '(' ()
+   $2 = nterm declarator ()
+   $3 = token ')' ()
+-> $$ = nterm declarator ()
+Reducing stack -1 by rule 11 (line 87):
+   $1 = token TYPENAME ()
+   $2 = nterm declarator ()
+   $3 = token ';' ()
+-> $$ = nterm decl ()
+Reducing stack -1 by rule 4 (line 75):
+   $1 = nterm decl ()
+-> $$ = nterm stmt ()
+Reducing stack -1 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 8 (line 81):
+   $1 = token TYPENAME ()
+   $2 = token '(' ()
+   $3 = nterm expr ()
+   $4 = token ')' ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 3 (line 74):
+   $1 = nterm expr ()
+   $2 = token ';' ()
+-> $$ = nterm stmt ()
+Reducing stack -1 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Returning to deterministic operation.
 Entering state 4
 Reading a token
-Next token is token '(' (15.2: )
-Shifting token '(' (15.2: )
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 12
 Reading a token
-Next token is token ID (15.3: )
-Shifting token ID (15.3: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 18
 Reading a token
-Next token is token ')' (15.4: )
+Next token is token ')' ()
 Stack 0 Entering state 18
-Next token is token ')' (15.4: )
+Next token is token ')' ()
 Splitting off stack 1 from 0.
-Reduced stack 1 by rule 13 (line 104); action deferred.  Now in state 21.
+Reduced stack 1 by rule 13 (line 94); action deferred.  Now in state 21.
 Stack 1 Entering state 21
-Next token is token ')' (15.4: )
-Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 20.
+Next token is token ')' ()
+Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 20.
 Stack 0 Entering state 20
-Next token is token ')' (15.4: )
+Next token is token ')' ()
 Stack 1 Entering state 21
-Next token is token ')' (15.4: )
-On stack 0, shifting token ')' (15.4: )
+Next token is token ')' ()
+On stack 0, shifting token ')' ()
 Stack 0 now in state 27
-On stack 1, shifting token ')' (15.4: )
+On stack 1, shifting token ')' ()
 Stack 1 now in state 28
 Stack 0 Entering state 27
-Reduced stack 0 by rule 8 (line 91); action deferred.  Now in state 8.
+Reduced stack 0 by rule 8 (line 81); action deferred.  Now in state 8.
 Stack 0 Entering state 8
 Reading a token
-Next token is token '=' (15.6: )
+Next token is token '=' ()
 Stack 1 Entering state 28
-Reduced stack 1 by rule 14 (line 105); action deferred.  Now in state 13.
+Reduced stack 1 by rule 14 (line 95); action deferred.  Now in state 13.
 Stack 1 Entering state 13
-Next token is token '=' (15.6: )
-On stack 0, shifting token '=' (15.6: )
+Next token is token '=' ()
+On stack 0, shifting token '=' ()
 Stack 0 now in state 14
-On stack 1, shifting token '=' (15.6: )
+On stack 1, shifting token '=' ()
 Stack 1 now in state 22
 Stack 0 Entering state 14
 Reading a token
-Next token is token ID (15.8: )
+Next token is token ID ()
 Stack 1 Entering state 22
-Next token is token ID (15.8: )
-On stack 0, shifting token ID (15.8: )
+Next token is token ID ()
+On stack 0, shifting token ID ()
 Stack 0 now in state 5
-On stack 1, shifting token ID (15.8: )
+On stack 1, shifting token ID ()
 Stack 1 now in state 5
 Stack 0 Entering state 5
-Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 24.
+Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 24.
 Stack 0 Entering state 24
 Reading a token
-Next token is token '+' (15.10: )
+Next token is token '+' ()
 Stack 1 Entering state 5
-Reduced stack 1 by rule 7 (line 90); action deferred.  Now in state 29.
+Reduced stack 1 by rule 7 (line 80); action deferred.  Now in state 29.
 Stack 1 Entering state 29
-Next token is token '+' (15.10: )
-On stack 0, shifting token '+' (15.10: )
+Next token is token '+' ()
+On stack 0, shifting token '+' ()
 Stack 0 now in state 15
-On stack 1, shifting token '+' (15.10: )
+On stack 1, shifting token '+' ()
 Stack 1 now in state 15
 Stack 0 Entering state 15
 Reading a token
-Next token is token ID (15.12: )
+Next token is token ID ()
 Stack 1 Entering state 15
-Next token is token ID (15.12: )
-On stack 0, shifting token ID (15.12: )
+Next token is token ID ()
+On stack 0, shifting token ID ()
 Stack 0 now in state 5
-On stack 1, shifting token ID (15.12: )
+On stack 1, shifting token ID ()
 Stack 1 now in state 5
 Stack 0 Entering state 5
-Reduced stack 0 by rule 7 (line 90); action deferred.  Now in state 25.
+Reduced stack 0 by rule 7 (line 80); action deferred.  Now in state 25.
 Stack 0 Entering state 25
-Reduced stack 0 by rule 9 (line 93); action deferred.  Now in state 24.
+Reduced stack 0 by rule 9 (line 83); action deferred.  Now in state 24.
 Stack 0 Entering state 24
 Reading a token
-Next token is token ';' (15.13: )
-Reduced stack 0 by rule 10 (line 94); action deferred.  Now in state 8.
+Next token is token ';' ()
+Reduced stack 0 by rule 10 (line 84); action deferred.  Now in state 8.
 Stack 0 Entering state 8
-Next token is token ';' (15.13: )
+Next token is token ';' ()
 Stack 1 Entering state 5
-Reduced stack 1 by rule 7 (line 90); action deferred.  Now in state 25.
+Reduced stack 1 by rule 7 (line 80); action deferred.  Now in state 25.
 Stack 1 Entering state 25
-Reduced stack 1 by rule 9 (line 93); action deferred.  Now in state 29.
+Reduced stack 1 by rule 9 (line 83); action deferred.  Now in state 29.
 Stack 1 Entering state 29
-Next token is token ';' (15.13: )
-On stack 0, shifting token ';' (15.13: )
+Next token is token ';' ()
+On stack 0, shifting token ';' ()
 Stack 0 now in state 16
-On stack 1, shifting token ';' (15.13: )
+On stack 1, shifting token ';' ()
 Stack 1 now in state 30
 Stack 0 Entering state 16
-Reduced stack 0 by rule 3 (line 84); action deferred.  Now in state 7.
+Reduced stack 0 by rule 3 (line 74); action deferred.  Now in state 7.
 Stack 0 Entering state 7
-Reduced stack 0 by rule 2 (line 72); action deferred.  Now in state 1.
+Reduced stack 0 by rule 2 (line 65); action deferred.  Now in state 1.
 Stack 0 Entering state 1
 Reading a token
-Next token is token TYPENAME (17.0: )
+Next token is token TYPENAME ()
 Stack 1 Entering state 30
-Reduced stack 1 by rule 12 (line 99); action deferred.  Now in state 9.
+Reduced stack 1 by rule 12 (line 89); action deferred.  Now in state 9.
 Stack 1 Entering state 9
-Reduced stack 1 by rule 4 (line 85); action deferred.  Now in state 7.
+Reduced stack 1 by rule 4 (line 75); action deferred.  Now in state 7.
 Stack 1 Entering state 7
-Reduced stack 1 by rule 2 (line 72); action deferred.  Now in state 1.
+Reduced stack 1 by rule 2 (line 65); action deferred.  Now in state 1.
 Merging stack 1 into stack 0.
 Removing dead stacks.
-On stack 0, shifting token TYPENAME (17.0: )
+On stack 0, shifting token TYPENAME ()
 Stack 0 now in state 4
-Reducing stack -1 by rule 13 (line 104):
-   $1 = token ID (15.3: )
--> $$ = nterm declarator (15.3: )
-Reducing stack -1 by rule 14 (line 105):
-   $1 = token '(' (15.2: )
-   $2 = nterm declarator (15.3: )
-   $3 = token ')' (15.4: )
--> $$ = nterm declarator (15.2-4: )
-Reducing stack -1 by rule 7 (line 90):
-   $1 = token ID (15.8: )
--> $$ = nterm expr (15.8: )
-Reducing stack -1 by rule 7 (line 90):
-   $1 = token ID (15.12: )
--> $$ = nterm expr (15.12: )
-Reducing stack -1 by rule 9 (line 93):
-   $1 = nterm expr (15.8: )
-   $2 = token '+' (15.10: )
-   $3 = nterm expr (15.12: )
--> $$ = nterm expr (15.8-12: )
-Reducing stack -1 by rule 12 (line 99):
-   $1 = token TYPENAME (15.0: )
-   $2 = nterm declarator (15.2-4: )
-   $3 = token '=' (15.6: )
-   $4 = nterm expr (15.8-12: )
-   $5 = token ';' (15.13: )
--> $$ = nterm decl (15.0-13: )
-Reducing stack -1 by rule 4 (line 85):
-   $1 = nterm decl (15.0-13: )
--> $$ = nterm stmt (15.0-13: )
-Reducing stack -1 by rule 7 (line 90):
-   $1 = token ID (15.3: )
--> $$ = nterm expr (15.3: )
-Reducing stack -1 by rule 8 (line 91):
-   $1 = token TYPENAME (15.0: )
-   $2 = token '(' (15.2: )
-   $3 = nterm expr (15.3: )
-   $4 = token ')' (15.4: )
--> $$ = nterm expr (15.0-4: )
-Reducing stack -1 by rule 7 (line 90):
-   $1 = token ID (15.8: )
--> $$ = nterm expr (15.8: )
-Reducing stack -1 by rule 7 (line 90):
-   $1 = token ID (15.12: )
--> $$ = nterm expr (15.12: )
-Reducing stack -1 by rule 9 (line 93):
-   $1 = nterm expr (15.8: )
-   $2 = token '+' (15.10: )
-   $3 = nterm expr (15.12: )
--> $$ = nterm expr (15.8-12: )
-Reducing stack -1 by rule 10 (line 94):
-   $1 = nterm expr (15.0-4: )
-   $2 = token '=' (15.6: )
-   $3 = nterm expr (15.8-12: )
--> $$ = nterm expr (15.0-12: )
-Reducing stack -1 by rule 3 (line 84):
-   $1 = nterm expr (15.0-12: )
-   $2 = token ';' (15.13: )
--> $$ = nterm stmt (15.0-13: )
-Reducing stack -1 by rule 2 (line 72):
-   $1 = nterm prog (1.1-13.5: )
-   $2 = nterm stmt (15.0-13: )
--> $$ = nterm prog (1.1-15.13: )
+Reducing stack -1 by rule 13 (line 94):
+   $1 = token ID ()
+-> $$ = nterm declarator ()
+Reducing stack -1 by rule 14 (line 95):
+   $1 = token '(' ()
+   $2 = nterm declarator ()
+   $3 = token ')' ()
+-> $$ = nterm declarator ()
+Reducing stack -1 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 9 (line 83):
+   $1 = nterm expr ()
+   $2 = token '+' ()
+   $3 = nterm expr ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 12 (line 89):
+   $1 = token TYPENAME ()
+   $2 = nterm declarator ()
+   $3 = token '=' ()
+   $4 = nterm expr ()
+   $5 = token ';' ()
+-> $$ = nterm decl ()
+Reducing stack -1 by rule 4 (line 75):
+   $1 = nterm decl ()
+-> $$ = nterm stmt ()
+Reducing stack -1 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 8 (line 81):
+   $1 = token TYPENAME ()
+   $2 = token '(' ()
+   $3 = nterm expr ()
+   $4 = token ')' ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 9 (line 83):
+   $1 = nterm expr ()
+   $2 = token '+' ()
+   $3 = nterm expr ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 10 (line 84):
+   $1 = nterm expr ()
+   $2 = token '=' ()
+   $3 = nterm expr ()
+-> $$ = nterm expr ()
+Reducing stack -1 by rule 3 (line 74):
+   $1 = nterm expr ()
+   $2 = token ';' ()
+-> $$ = nterm stmt ()
+Reducing stack -1 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Returning to deterministic operation.
 Entering state 4
 Reading a token
-Next token is token '(' (17.2: )
-Shifting token '(' (17.2: )
+Next token is token '(' ()
+Shifting token '(' ()
 Entering state 12
 Reading a token
-Next token is token ID (17.3: )
-Shifting token ID (17.3: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 18
 Reading a token
-Next token is token ID (17.5: )
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (17.3: )
--> $$ = nterm expr (17.3: )
+Next token is token ID ()
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 20
-Next token is token ID (17.5: )
-17.5: syntax error
-Error: popping nterm expr (17.3: )
-Error: popping token '(' (17.2: )
-Error: popping token TYPENAME (17.0: )
-Shifting token error (17.0-5: )
+Next token is token ID ()
+syntax error
+Error: popping nterm expr ()
+Error: popping token '(' ()
+Error: popping token TYPENAME ()
+Shifting token error ()
 Entering state 3
-Next token is token ID (17.5: )
-Error: discarding token ID (17.5: )
+Next token is token ID ()
+Error: discarding token ID ()
 Reading a token
-Next token is token ')' (17.6: )
-Error: discarding token ')' (17.6: )
+Next token is token ')' ()
+Error: discarding token ')' ()
 Reading a token
-Next token is token '=' (17.8: )
-Error: discarding token '=' (17.8: )
+Next token is token '=' ()
+Error: discarding token '=' ()
 Reading a token
-Next token is token ID (17.10: )
-Error: discarding token ID (17.10: )
+Next token is token ID ()
+Error: discarding token ID ()
 Reading a token
-Next token is token '+' (17.12: )
-Error: discarding token '+' (17.12: )
+Next token is token '+' ()
+Error: discarding token '+' ()
 Reading a token
-Next token is token ID (17.14: )
-Error: discarding token ID (17.14: )
+Next token is token ID ()
+Error: discarding token ID ()
 Reading a token
-Next token is token ';' (17.15: )
+Next token is token ';' ()
 Entering state 3
-Next token is token ';' (17.15: )
-Shifting token ';' (17.15: )
+Next token is token ';' ()
+Shifting token ';' ()
 Entering state 10
-Reducing stack 0 by rule 5 (line 86):
-   $1 = token error (17.0-14: )
-   $2 = token ';' (17.15: )
--> $$ = nterm stmt (17.0-15: )
+Reducing stack 0 by rule 5 (line 76):
+   $1 = token error ()
+   $2 = token ';' ()
+-> $$ = nterm stmt ()
 Entering state 7
-Reducing stack 0 by rule 2 (line 72):
-   $1 = nterm prog (1.1-15.13: )
-   $2 = nterm stmt (17.0-15: )
--> $$ = nterm prog (1.1-17.15: )
+Reducing stack 0 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Entering state 1
 Reading a token
-Next token is token ID (19.0: )
-Shifting token ID (19.0: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 5
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (19.0: )
--> $$ = nterm expr (19.0: )
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 8
 Reading a token
-Next token is token '+' (19.2: )
-Shifting token '+' (19.2: )
+Next token is token '+' ()
+Shifting token '+' ()
 Entering state 15
 Reading a token
-Next token is token ID (19.4: )
-Shifting token ID (19.4: )
+Next token is token ID ()
+Shifting token ID ()
 Entering state 5
-Reducing stack 0 by rule 7 (line 90):
-   $1 = token ID (19.4: )
--> $$ = nterm expr (19.4: )
+Reducing stack 0 by rule 7 (line 80):
+   $1 = token ID ()
+-> $$ = nterm expr ()
 Entering state 25
-Reducing stack 0 by rule 9 (line 93):
-   $1 = nterm expr (19.0: )
-   $2 = token '+' (19.2: )
-   $3 = nterm expr (19.4: )
--> $$ = nterm expr (19.0-4: )
+Reducing stack 0 by rule 9 (line 83):
+   $1 = nterm expr ()
+   $2 = token '+' ()
+   $3 = nterm expr ()
+-> $$ = nterm expr ()
 Entering state 8
 Reading a token
-Next token is token ';' (19.5: )
-Shifting token ';' (19.5: )
+Next token is token ';' ()
+Shifting token ';' ()
 Entering state 16
-Reducing stack 0 by rule 3 (line 84):
-   $1 = nterm expr (19.0-4: )
-   $2 = token ';' (19.5: )
--> $$ = nterm stmt (19.0-5: )
+Reducing stack 0 by rule 3 (line 74):
+   $1 = nterm expr ()
+   $2 = token ';' ()
+-> $$ = nterm stmt ()
 Entering state 7
-Reducing stack 0 by rule 2 (line 72):
-   $1 = nterm prog (1.1-17.15: )
-   $2 = nterm stmt (19.0-5: )
--> $$ = nterm prog (1.1-19.5: )
+Reducing stack 0 by rule 2 (line 65):
+   $1 = nterm prog ()
+   $2 = nterm stmt ()
+-> $$ = nterm prog ()
 Entering state 1
 Reading a token
-Next token is token '@' (21.0: )
-Shifting token '@' (21.0: )
+Next token is token '@' ()
+Shifting token '@' ()
 Entering state 6
-Reducing stack 0 by rule 6 (line 87):
-   $1 = token '@' (21.0: )
-Cleanup: popping nterm prog (1.1-19.5: )
-712. cxx-type.at:449:  ok
+Reducing stack 0 by rule 6 (line 77):
+   $1 = token '@' ()
+Cleanup: popping nterm prog ()
+711. cxx-type.at:444:  ok
+
 
-715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ...
-./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o glr-regr1.cc glr-regr1.y
-./glr-regression.at:206: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS
-stderr:
-stdout:
-./c++.at:1363:  $PREPARSER ./input aaaas
-stderr:
-exception caught: reduction
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaal
-stderr:
-exception caught: yylex
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input i
-stderr:
-exception caught: initial-action
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaap
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input --debug aaaap
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xffffc8517b80->Object::Object { }
-0xffffc8517c70->Object::Object { 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'a' (0xffffc8517c70 'a')
-0xffffc8517b90->Object::Object { 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xffffc8517b90, 0xffffc8517c70 }
-Shifting token 'a' (0xffffc8517b90 'a')
-0xaaab016bdee0->Object::Object { 0xffffc8517b90 }
-0xffffc8517b90->Object::~Object { 0xaaab016bdee0, 0xffffc8517b90 }
-Entering state 1
-Stack now 0 1
-0xffffc8517c90->Object::Object { 0xaaab016bdee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab016bdee0 'a')
--> $$ = nterm item (0xffffc8517c90 'a')
-0xaaab016bdee0->Object::~Object { 0xaaab016bdee0, 0xffffc8517c90 }
-0xaaab016bdee0->Object::Object { 0xffffc8517c90 }
-0xffffc8517c90->Object::~Object { 0xaaab016bdee0, 0xffffc8517c90 }
-Entering state 10
-Stack now 0 10
-Reading a token
-0xffffc8517b80->Object::Object { 0xaaab016bdee0 }
-0xffffc8517c70->Object::Object { 0xaaab016bdee0, 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xaaab016bdee0, 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'a' (0xffffc8517c70 'a')
-0xffffc8517b90->Object::Object { 0xaaab016bdee0, 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xaaab016bdee0, 0xffffc8517b90, 0xffffc8517c70 }
-Shifting token 'a' (0xffffc8517b90 'a')
-0xaaab016bdf00->Object::Object { 0xaaab016bdee0, 0xffffc8517b90 }
-0xffffc8517b90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b90 }
-Entering state 1
-Stack now 0 10 1
-0xffffc8517c90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab016bdf00 'a')
--> $$ = nterm item (0xffffc8517c90 'a')
-0xaaab016bdf00->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c90 }
-0xaaab016bdf00->Object::Object { 0xaaab016bdee0, 0xffffc8517c90 }
-0xffffc8517c90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c90 }
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xffffc8517b80->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00 }
-0xffffc8517c70->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'a' (0xffffc8517c70 'a')
-0xffffc8517b90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b90, 0xffffc8517c70 }
-Shifting token 'a' (0xffffc8517b90 'a')
-0xaaab016bdf20->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b90 }
-0xffffc8517b90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b90 }
-Entering state 1
-Stack now 0 10 10 1
-0xffffc8517c90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab016bdf20 'a')
--> $$ = nterm item (0xffffc8517c90 'a')
-0xaaab016bdf20->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c90 }
-0xaaab016bdf20->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c90 }
-0xffffc8517c90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c90 }
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xffffc8517b80->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20 }
-0xffffc8517c70->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'a' (0xffffc8517c70 'a')
-0xffffc8517b90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b90, 0xffffc8517c70 }
-Shifting token 'a' (0xffffc8517b90 'a')
-0xaaab016bdf40->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b90 }
-0xffffc8517b90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517b90 }
-Entering state 1
-Stack now 0 10 10 10 1
-0xffffc8517c90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab016bdf40 'a')
--> $$ = nterm item (0xffffc8517c90 'a')
-0xaaab016bdf40->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517c90 }
-0xaaab016bdf40->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c90 }
-0xffffc8517c90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517c90 }
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xffffc8517b80->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40 }
-0xffffc8517c70->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'p' (0xffffc8517c70 'p'Exception caught: cleaning lookahead and stack
-0xaaab016bdf40->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517c70 }
-0xaaab016bdf20->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c70 }
-0xaaab016bdf00->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c70 }
-0xaaab016bdee0->Object::~Object { 0xaaab016bdee0, 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xffffc8517c70 }
-exception caught: printer
-end { }
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-Starting parse
-Entering state 0
-Stack now 0
-Reading a token
-0xffffc8517b80->Object::Object { }
-0xffffc8517c70->Object::Object { 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'a' (0xffffc8517c70 'a')
-0xffffc8517b90->Object::Object { 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xffffc8517b90, 0xffffc8517c70 }
-Shifting token 'a' (0xffffc8517b90 'a')
-0xaaab016bdee0->Object::Object { 0xffffc8517b90 }
-0xffffc8517b90->Object::~Object { 0xaaab016bdee0, 0xffffc8517b90 }
-Entering state 1
-Stack now 0 1
-0xffffc8517c90->Object::Object { 0xaaab016bdee0 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab016bdee0 'a')
--> $$ = nterm item (0xffffc8517c90 'a')
-0xaaab016bdee0->Object::~Object { 0xaaab016bdee0, 0xffffc8517c90 }
-0xaaab016bdee0->Object::Object { 0xffffc8517c90 }
-0xffffc8517c90->Object::~Object { 0xaaab016bdee0, 0xffffc8517c90 }
-Entering state 10
-Stack now 0 10
-Reading a token
-0xffffc8517b80->Object::Object { 0xaaab016bdee0 }
-0xffffc8517c70->Object::Object { 0xaaab016bdee0, 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xaaab016bdee0, 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'a' (0xffffc8517c70 'a')
-0xffffc8517b90->Object::Object { 0xaaab016bdee0, 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xaaab016bdee0, 0xffffc8517b90, 0xffffc8517c70 }
-Shifting token 'a' (0xffffc8517b90 'a')
-0xaaab016bdf00->Object::Object { 0xaaab016bdee0, 0xffffc8517b90 }
-0xffffc8517b90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b90 }
-Entering state 1
-Stack now 0 10 1
-0xffffc8517c90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab016bdf00 'a')
--> $$ = nterm item (0xffffc8517c90 'a')
-0xaaab016bdf00->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c90 }
-0xaaab016bdf00->Object::Object { 0xaaab016bdee0, 0xffffc8517c90 }
-0xffffc8517c90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c90 }
-Entering state 10
-Stack now 0 10 10
-Reading a token
-0xffffc8517b80->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00 }
-0xffffc8517c70->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'a' (0xffffc8517c70 'a')
-0xffffc8517b90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b90, 0xffffc8517c70 }
-Shifting token 'a' (0xffffc8517b90 'a')
-0xaaab016bdf20->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517b90 }
-0xffffc8517b90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b90 }
-Entering state 1
-Stack now 0 10 10 1
-0xffffc8517c90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab016bdf20 'a')
--> $$ = nterm item (0xffffc8517c90 'a')
-0xaaab016bdf20->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c90 }
-0xaaab016bdf20->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c90 }
-0xffffc8517c90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c90 }
-Entering state 10
-Stack now 0 10 10 10
-Reading a token
-0xffffc8517b80->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20 }
-0xffffc8517c70->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'a' (0xffffc8517c70 'a')
-0xffffc8517b90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b90, 0xffffc8517c70 }
-Shifting token 'a' (0xffffc8517b90 'a')
-0xaaab016bdf40->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517b90 }
-0xffffc8517b90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517b90 }
-Entering state 1
-Stack now 0 10 10 10 1
-0xffffc8517c90->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40 }
-Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab016bdf40 'a')
--> $$ = nterm item (0xffffc8517c90 'a')
-0xaaab016bdf40->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517c90 }
-0xaaab016bdf40->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c90 }
-0xffffc8517c90->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517c90 }
-Entering state 10
-Stack now 0 10 10 10 10
-Reading a token
-0xffffc8517b80->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40 }
-0xffffc8517c70->Object::Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517b80 }
-0xffffc8517b80->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517b80, 0xffffc8517c70 }
-Next token is token 'p' (0xffffc8517c70 'p'Exception caught: cleaning lookahead and stack
-0xaaab016bdf40->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xaaab016bdf40, 0xffffc8517c70 }
-0xaaab016bdf20->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xaaab016bdf20, 0xffffc8517c70 }
-0xaaab016bdf00->Object::~Object { 0xaaab016bdee0, 0xaaab016bdf00, 0xffffc8517c70 }
-0xaaab016bdee0->Object::~Object { 0xaaab016bdee0, 0xffffc8517c70 }
-0xffffc8517c70->Object::~Object { 0xffffc8517c70 }
-exception caught: printer
-end { }
-./c++.at:1363: grep '^exception caught: printer$' stderr
-stdout:
-exception caught: printer
-./c++.at:1363:  $PREPARSER ./input aaaae
-stderr:
-exception caught: syntax error
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaE
-stderr:
-exception caught: syntax error, unexpected end of file, expecting 'a'
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaT
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1363:  $PREPARSER ./input aaaaR
-stderr:
-./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./c++.at:859:  $PREPARSER ./input
+./c++.at:1555:  $PREPARSER ./test
 stderr:
-./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
-./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./glr-regression.at:205:  $PREPARSER ./glr-regr1 BPBPB
-stderr:
-./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-714. glr-regression.at:205:  ok
-
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o check check.cc $LIBS
+715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ...
+./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o glr-regr1.cc glr-regr1.y
+./glr-regression.at:206: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS
+716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ...
+./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o glr-regr1.cc glr-regr1.y
+./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS
 stderr:
 stdout:
 ./cxx-type.at:458:  $PREPARSER ./types test-input
@@ -268242,8 +268044,6 @@
 syntax error, unexpected ID, expecting '=' or '+' or ')'
 ./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./cxx-type.at:458:  $PREPARSER ./types -p test-input
-716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ...
-./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o glr-regr1.cc glr-regr1.y
 stderr:
 Starting parse
 Entering state 0
@@ -268855,6 +268655,9 @@
 Cleanup: popping nterm prog ()
 ./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stdout:
+stderr:
+./c++.at:1555: ./check
 Starting parse
 Entering state 0
 Reducing stack 0 by rule 1 (line 64):
@@ -269464,76 +269267,19 @@
    $1 = token '@' ()
 Cleanup: popping nterm prog ()
 713. cxx-type.at:455:  ok
-./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS
+./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -fcaret -o test.cc test.y
 
 717. glr-regression.at:354: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.c ...
 ./glr-regression.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr2a.c glr-regr2a.y
+./c++.at:1555: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o test test.cc $LIBS
 ./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS
 stderr:
 stdout:
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-caught error
-error: invalid character
-caught error
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid expression
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./c++.at:1066:  $PREPARSER ./input < in
-stderr:
-error: invalid character
-./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-684. c++.at:1066:  ok
-
-718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ...
-./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y
-./glr-regression.at:355: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS
-stderr:
-stdout:
-./c++.at:1555:  $PREPARSER ./test
-stderr:
-./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-691. c++.at:1517:  ok
-
-719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ...
-./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y
-./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS
-stderr:
-stdout:
-./glr-regression.at:206:  $PREPARSER ./glr-regr1 BPBPB
-stderr:
-./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-715. glr-regression.at:206:  ok
-
-stderr:
-stdout:
 ./c++.at:859:  $PREPARSER ./input
 stderr:
 ./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ...
-./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y
-./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS
-stderr:
-stdout:
-./glr-regression.at:354:  $PREPARSER ./glr-regr2a input1.txt
-stderr:
-./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./glr-regression.at:354:  $PREPARSER ./glr-regr2a input2.txt
-stderr:
-./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./glr-regression.at:354:  $PREPARSER ./glr-regr2a input3.txt
-stderr:
-./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-717. glr-regression.at:354:  ok
-
-721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ...
-./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y
-./glr-regression.at:489: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1362:  $PREPARSER ./input aaaas
@@ -269557,99 +269303,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd52b94f0->Object::Object { }
-0xffffd52b95c0->Object::Object { 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'a' (0xffffd52b95c0 'a')
-0xffffd52b94e0->Object::Object { 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xffffd52b94e0, 0xffffd52b95c0 }
-Shifting token 'a' (0xffffd52b94e0 'a')
-0xaaaae936eee0->Object::Object { 0xffffd52b94e0 }
-0xffffd52b94e0->Object::~Object { 0xaaaae936eee0, 0xffffd52b94e0 }
+0xffffd1c77970->Object::Object { }
+0xffffd1c77a40->Object::Object { 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'a' (0xffffd1c77a40 'a')
+0xffffd1c77960->Object::Object { 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xffffd1c77960, 0xffffd1c77a40 }
+Shifting token 'a' (0xffffd1c77960 'a')
+0xaaaace978ee0->Object::Object { 0xffffd1c77960 }
+0xffffd1c77960->Object::~Object { 0xaaaace978ee0, 0xffffd1c77960 }
 Entering state 2
 Stack now 0 2
-0xffffd52b95e0->Object::Object { 0xaaaae936eee0 }
+0xffffd1c77a60->Object::Object { 0xaaaace978ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae936eee0 'a')
--> $$ = nterm item (0xffffd52b95e0 'a')
-0xaaaae936eee0->Object::~Object { 0xaaaae936eee0, 0xffffd52b95e0 }
-0xaaaae936eee0->Object::Object { 0xffffd52b95e0 }
-0xffffd52b95e0->Object::~Object { 0xaaaae936eee0, 0xffffd52b95e0 }
+   $1 = token 'a' (0xaaaace978ee0 'a')
+-> $$ = nterm item (0xffffd1c77a60 'a')
+0xaaaace978ee0->Object::~Object { 0xaaaace978ee0, 0xffffd1c77a60 }
+0xaaaace978ee0->Object::Object { 0xffffd1c77a60 }
+0xffffd1c77a60->Object::~Object { 0xaaaace978ee0, 0xffffd1c77a60 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xffffd52b94f0->Object::Object { 0xaaaae936eee0 }
-0xffffd52b95c0->Object::Object { 0xaaaae936eee0, 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xaaaae936eee0, 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'a' (0xffffd52b95c0 'a')
-0xffffd52b94e0->Object::Object { 0xaaaae936eee0, 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xaaaae936eee0, 0xffffd52b94e0, 0xffffd52b95c0 }
-Shifting token 'a' (0xffffd52b94e0 'a')
-0xaaaae936ef00->Object::Object { 0xaaaae936eee0, 0xffffd52b94e0 }
-0xffffd52b94e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94e0 }
+0xffffd1c77970->Object::Object { 0xaaaace978ee0 }
+0xffffd1c77a40->Object::Object { 0xaaaace978ee0, 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xaaaace978ee0, 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'a' (0xffffd1c77a40 'a')
+0xffffd1c77960->Object::Object { 0xaaaace978ee0, 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xaaaace978ee0, 0xffffd1c77960, 0xffffd1c77a40 }
+Shifting token 'a' (0xffffd1c77960 'a')
+0xaaaace978f00->Object::Object { 0xaaaace978ee0, 0xffffd1c77960 }
+0xffffd1c77960->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77960 }
 Entering state 2
 Stack now 0 11 2
-0xffffd52b95e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00 }
+0xffffd1c77a60->Object::Object { 0xaaaace978ee0, 0xaaaace978f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae936ef00 'a')
--> $$ = nterm item (0xffffd52b95e0 'a')
-0xaaaae936ef00->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95e0 }
-0xaaaae936ef00->Object::Object { 0xaaaae936eee0, 0xffffd52b95e0 }
-0xffffd52b95e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95e0 }
+   $1 = token 'a' (0xaaaace978f00 'a')
+-> $$ = nterm item (0xffffd1c77a60 'a')
+0xaaaace978f00->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a60 }
+0xaaaace978f00->Object::Object { 0xaaaace978ee0, 0xffffd1c77a60 }
+0xffffd1c77a60->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a60 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xffffd52b94f0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00 }
-0xffffd52b95c0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'a' (0xffffd52b95c0 'a')
-0xffffd52b94e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94e0, 0xffffd52b95c0 }
-Shifting token 'a' (0xffffd52b94e0 'a')
-0xaaaae936ef20->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94e0 }
-0xffffd52b94e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94e0 }
+0xffffd1c77970->Object::Object { 0xaaaace978ee0, 0xaaaace978f00 }
+0xffffd1c77a40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'a' (0xffffd1c77a40 'a')
+0xffffd1c77960->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77960, 0xffffd1c77a40 }
+Shifting token 'a' (0xffffd1c77960 'a')
+0xaaaace978f20->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77960 }
+0xffffd1c77960->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77960 }
 Entering state 2
 Stack now 0 11 11 2
-0xffffd52b95e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20 }
+0xffffd1c77a60->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae936ef20 'a')
--> $$ = nterm item (0xffffd52b95e0 'a')
-0xaaaae936ef20->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95e0 }
-0xaaaae936ef20->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95e0 }
-0xffffd52b95e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95e0 }
+   $1 = token 'a' (0xaaaace978f20 'a')
+-> $$ = nterm item (0xffffd1c77a60 'a')
+0xaaaace978f20->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a60 }
+0xaaaace978f20->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a60 }
+0xffffd1c77a60->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a60 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xffffd52b94f0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20 }
-0xffffd52b95c0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'a' (0xffffd52b95c0 'a')
-0xffffd52b94e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94e0, 0xffffd52b95c0 }
-Shifting token 'a' (0xffffd52b94e0 'a')
-0xaaaae936ef40->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94e0 }
-0xffffd52b94e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b94e0 }
+0xffffd1c77970->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20 }
+0xffffd1c77a40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'a' (0xffffd1c77a40 'a')
+0xffffd1c77960->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77960, 0xffffd1c77a40 }
+Shifting token 'a' (0xffffd1c77960 'a')
+0xaaaace978f40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77960 }
+0xffffd1c77960->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77960 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xffffd52b95e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40 }
+0xffffd1c77a60->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae936ef40 'a')
--> $$ = nterm item (0xffffd52b95e0 'a')
-0xaaaae936ef40->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b95e0 }
-0xaaaae936ef40->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95e0 }
-0xffffd52b95e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b95e0 }
+   $1 = token 'a' (0xaaaace978f40 'a')
+-> $$ = nterm item (0xffffd1c77a60 'a')
+0xaaaace978f40->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77a60 }
+0xaaaace978f40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a60 }
+0xffffd1c77a60->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77a60 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xffffd52b94f0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40 }
-0xffffd52b95c0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'p' (0xffffd52b95c0 'p'Exception caught: cleaning lookahead and stack
-0xaaaae936ef40->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b95c0 }
-0xaaaae936ef20->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95c0 }
-0xaaaae936ef00->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95c0 }
-0xaaaae936eee0->Object::~Object { 0xaaaae936eee0, 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xffffd52b95c0 }
+0xffffd1c77970->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40 }
+0xffffd1c77a40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'p' (0xffffd1c77a40 'p'Exception caught: cleaning lookahead and stack
+0xaaaace978f40->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77a40 }
+0xaaaace978f20->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a40 }
+0xaaaace978f00->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a40 }
+0xaaaace978ee0->Object::~Object { 0xaaaace978ee0, 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xffffd1c77a40 }
 exception caught: printer
 end { }
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -269658,99 +269404,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd52b94f0->Object::Object { }
-0xffffd52b95c0->Object::Object { 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'a' (0xffffd52b95c0 'a')
-0xffffd52b94e0->Object::Object { 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xffffd52b94e0, 0xffffd52b95c0 }
-Shifting token 'a' (0xffffd52b94e0 'a')
-0xaaaae936eee0->Object::Object { 0xffffd52b94e0 }
-0xffffd52b94e0->Object::~Object { 0xaaaae936eee0, 0xffffd52b94e0 }
+0xffffd1c77970->Object::Object { }
+0xffffd1c77a40->Object::Object { 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'a' (0xffffd1c77a40 'a')
+0xffffd1c77960->Object::Object { 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xffffd1c77960, 0xffffd1c77a40 }
+Shifting token 'a' (0xffffd1c77960 'a')
+0xaaaace978ee0->Object::Object { 0xffffd1c77960 }
+0xffffd1c77960->Object::~Object { 0xaaaace978ee0, 0xffffd1c77960 }
 Entering state 2
 Stack now 0 2
-0xffffd52b95e0->Object::Object { 0xaaaae936eee0 }
+0xffffd1c77a60->Object::Object { 0xaaaace978ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae936eee0 'a')
--> $$ = nterm item (0xffffd52b95e0 'a')
-0xaaaae936eee0->Object::~Object { 0xaaaae936eee0, 0xffffd52b95e0 }
-0xaaaae936eee0->Object::Object { 0xffffd52b95e0 }
-0xffffd52b95e0->Object::~Object { 0xaaaae936eee0, 0xffffd52b95e0 }
+   $1 = token 'a' (0xaaaace978ee0 'a')
+-> $$ = nterm item (0xffffd1c77a60 'a')
+0xaaaace978ee0->Object::~Object { 0xaaaace978ee0, 0xffffd1c77a60 }
+0xaaaace978ee0->Object::Object { 0xffffd1c77a60 }
+0xffffd1c77a60->Object::~Object { 0xaaaace978ee0, 0xffffd1c77a60 }
 Entering state 11
 Stack now 0 11
 Reading a token
-0xffffd52b94f0->Object::Object { 0xaaaae936eee0 }
-0xffffd52b95c0->Object::Object { 0xaaaae936eee0, 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xaaaae936eee0, 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'a' (0xffffd52b95c0 'a')
-0xffffd52b94e0->Object::Object { 0xaaaae936eee0, 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xaaaae936eee0, 0xffffd52b94e0, 0xffffd52b95c0 }
-Shifting token 'a' (0xffffd52b94e0 'a')
-0xaaaae936ef00->Object::Object { 0xaaaae936eee0, 0xffffd52b94e0 }
-0xffffd52b94e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94e0 }
+0xffffd1c77970->Object::Object { 0xaaaace978ee0 }
+0xffffd1c77a40->Object::Object { 0xaaaace978ee0, 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xaaaace978ee0, 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'a' (0xffffd1c77a40 'a')
+0xffffd1c77960->Object::Object { 0xaaaace978ee0, 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xaaaace978ee0, 0xffffd1c77960, 0xffffd1c77a40 }
+Shifting token 'a' (0xffffd1c77960 'a')
+0xaaaace978f00->Object::Object { 0xaaaace978ee0, 0xffffd1c77960 }
+0xffffd1c77960->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77960 }
 Entering state 2
 Stack now 0 11 2
-0xffffd52b95e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00 }
+0xffffd1c77a60->Object::Object { 0xaaaace978ee0, 0xaaaace978f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae936ef00 'a')
--> $$ = nterm item (0xffffd52b95e0 'a')
-0xaaaae936ef00->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95e0 }
-0xaaaae936ef00->Object::Object { 0xaaaae936eee0, 0xffffd52b95e0 }
-0xffffd52b95e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95e0 }
+   $1 = token 'a' (0xaaaace978f00 'a')
+-> $$ = nterm item (0xffffd1c77a60 'a')
+0xaaaace978f00->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a60 }
+0xaaaace978f00->Object::Object { 0xaaaace978ee0, 0xffffd1c77a60 }
+0xffffd1c77a60->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a60 }
 Entering state 11
 Stack now 0 11 11
 Reading a token
-0xffffd52b94f0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00 }
-0xffffd52b95c0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'a' (0xffffd52b95c0 'a')
-0xffffd52b94e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94e0, 0xffffd52b95c0 }
-Shifting token 'a' (0xffffd52b94e0 'a')
-0xaaaae936ef20->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b94e0 }
-0xffffd52b94e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94e0 }
+0xffffd1c77970->Object::Object { 0xaaaace978ee0, 0xaaaace978f00 }
+0xffffd1c77a40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'a' (0xffffd1c77a40 'a')
+0xffffd1c77960->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77960, 0xffffd1c77a40 }
+Shifting token 'a' (0xffffd1c77960 'a')
+0xaaaace978f20->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77960 }
+0xffffd1c77960->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77960 }
 Entering state 2
 Stack now 0 11 11 2
-0xffffd52b95e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20 }
+0xffffd1c77a60->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae936ef20 'a')
--> $$ = nterm item (0xffffd52b95e0 'a')
-0xaaaae936ef20->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95e0 }
-0xaaaae936ef20->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95e0 }
-0xffffd52b95e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95e0 }
+   $1 = token 'a' (0xaaaace978f20 'a')
+-> $$ = nterm item (0xffffd1c77a60 'a')
+0xaaaace978f20->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a60 }
+0xaaaace978f20->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a60 }
+0xffffd1c77a60->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a60 }
 Entering state 11
 Stack now 0 11 11 11
 Reading a token
-0xffffd52b94f0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20 }
-0xffffd52b95c0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'a' (0xffffd52b95c0 'a')
-0xffffd52b94e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94e0, 0xffffd52b95c0 }
-Shifting token 'a' (0xffffd52b94e0 'a')
-0xaaaae936ef40->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b94e0 }
-0xffffd52b94e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b94e0 }
+0xffffd1c77970->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20 }
+0xffffd1c77a40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'a' (0xffffd1c77a40 'a')
+0xffffd1c77960->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77960, 0xffffd1c77a40 }
+Shifting token 'a' (0xffffd1c77960 'a')
+0xaaaace978f40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77960 }
+0xffffd1c77960->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77960 }
 Entering state 2
 Stack now 0 11 11 11 2
-0xffffd52b95e0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40 }
+0xffffd1c77a60->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaae936ef40 'a')
--> $$ = nterm item (0xffffd52b95e0 'a')
-0xaaaae936ef40->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b95e0 }
-0xaaaae936ef40->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95e0 }
-0xffffd52b95e0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b95e0 }
+   $1 = token 'a' (0xaaaace978f40 'a')
+-> $$ = nterm item (0xffffd1c77a60 'a')
+0xaaaace978f40->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77a60 }
+0xaaaace978f40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a60 }
+0xffffd1c77a60->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77a60 }
 Entering state 11
 Stack now 0 11 11 11 11
 Reading a token
-0xffffd52b94f0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40 }
-0xffffd52b95c0->Object::Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b94f0 }
-0xffffd52b94f0->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b94f0, 0xffffd52b95c0 }
-Next token is token 'p' (0xffffd52b95c0 'p'Exception caught: cleaning lookahead and stack
-0xaaaae936ef40->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xaaaae936ef40, 0xffffd52b95c0 }
-0xaaaae936ef20->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xaaaae936ef20, 0xffffd52b95c0 }
-0xaaaae936ef00->Object::~Object { 0xaaaae936eee0, 0xaaaae936ef00, 0xffffd52b95c0 }
-0xaaaae936eee0->Object::~Object { 0xaaaae936eee0, 0xffffd52b95c0 }
-0xffffd52b95c0->Object::~Object { 0xffffd52b95c0 }
+0xffffd1c77970->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40 }
+0xffffd1c77a40->Object::Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77970 }
+0xffffd1c77970->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77970, 0xffffd1c77a40 }
+Next token is token 'p' (0xffffd1c77a40 'p'Exception caught: cleaning lookahead and stack
+0xaaaace978f40->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xaaaace978f40, 0xffffd1c77a40 }
+0xaaaace978f20->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xaaaace978f20, 0xffffd1c77a40 }
+0xaaaace978f00->Object::~Object { 0xaaaace978ee0, 0xaaaace978f00, 0xffffd1c77a40 }
+0xaaaace978ee0->Object::~Object { 0xaaaace978ee0, 0xffffd1c77a40 }
+0xffffd1c77a40->Object::~Object { 0xffffd1c77a40 }
 exception caught: printer
 end { }
 ./c++.at:1362: grep '^exception caught: printer$' stderr
@@ -269762,52 +269508,66 @@
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1362:  $PREPARSER ./input aaaaE
 stderr:
+stdout:
+stderr:
+./glr-regression.at:205:  $PREPARSER ./glr-regr1 BPBPB
 exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
 ./c++.at:1362:  $PREPARSER ./input aaaaT
+./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+714. glr-regression.at:205:  ok
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1362:  $PREPARSER ./input aaaaR
 stderr:
 ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-687. c++.at:1362:  ok
+======== Testing with C++ standard flags: ''
 
-722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ...
-./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y
-./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS
+./c++.at:1362: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ...
+./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y
+./glr-regression.at:355: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:488:  $PREPARSER ./glr-regr3 input.txt
+./glr-regression.at:354:  $PREPARSER ./glr-regr2a input1.txt
+stderr:
+./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./glr-regression.at:354:  $PREPARSER ./glr-regr2a input2.txt
 stderr:
-./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-720. glr-regression.at:488:  ok
-
-723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ...
-./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y
-./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS
 stderr:
 stdout:
+./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:859:  $PREPARSER ./input
 stderr:
+./glr-regression.at:354:  $PREPARSER ./glr-regr2a input3.txt
 ./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
+stderr:
 ./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+717. glr-regression.at:354:  ok
+
+719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ...
+./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y
+./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:355:  $PREPARSER ./glr-regr2a input1.txt
+./glr-regression.at:206:  $PREPARSER ./glr-regr1 BPBPB
 stderr:
-./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./glr-regression.at:355:  $PREPARSER ./glr-regr2a input2.txt
+./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+715. glr-regression.at:206:  ok
+
+720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ...
+./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y
+./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS
 stderr:
-./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./glr-regression.at:355:  $PREPARSER ./glr-regr2a input3.txt
+stdout:
+./c++.at:1555:  $PREPARSER ./test
 stderr:
-./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-718. glr-regression.at:355:  ok
+./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+691. c++.at:1517:  ok
 
-724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ...
-./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y
-./glr-regression.at:593: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS
 stderr:
 stdout:
 ./c++.at:1363:  $PREPARSER ./input aaaas
@@ -269822,7 +269582,9 @@
 stderr:
 exception caught: initial-action
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ...
 ./c++.at:1363:  $PREPARSER ./input aaaap
+./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y
 stderr:
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input --debug aaaap
@@ -269831,99 +269593,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffdca97a80->Object::Object { }
-0xffffdca97b70->Object::Object { 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'a' (0xffffdca97b70 'a')
-0xffffdca97a90->Object::Object { 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xffffdca97a90, 0xffffdca97b70 }
-Shifting token 'a' (0xffffdca97a90 'a')
-0xaaaad977dee0->Object::Object { 0xffffdca97a90 }
-0xffffdca97a90->Object::~Object { 0xaaaad977dee0, 0xffffdca97a90 }
+0xfffffd640040->Object::Object { }
+0xfffffd640130->Object::Object { 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'a' (0xfffffd640130 'a')
+0xfffffd640050->Object::Object { 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xfffffd640050, 0xfffffd640130 }
+Shifting token 'a' (0xfffffd640050 'a')
+0xaaaad52f7ee0->Object::Object { 0xfffffd640050 }
+0xfffffd640050->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640050 }
 Entering state 1
 Stack now 0 1
-0xffffdca97b90->Object::Object { 0xaaaad977dee0 }
+0xfffffd640150->Object::Object { 0xaaaad52f7ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad977dee0 'a')
--> $$ = nterm item (0xffffdca97b90 'a')
-0xaaaad977dee0->Object::~Object { 0xaaaad977dee0, 0xffffdca97b90 }
-0xaaaad977dee0->Object::Object { 0xffffdca97b90 }
-0xffffdca97b90->Object::~Object { 0xaaaad977dee0, 0xffffdca97b90 }
+   $1 = token 'a' (0xaaaad52f7ee0 'a')
+-> $$ = nterm item (0xfffffd640150 'a')
+0xaaaad52f7ee0->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640150 }
+0xaaaad52f7ee0->Object::Object { 0xfffffd640150 }
+0xfffffd640150->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640150 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xffffdca97a80->Object::Object { 0xaaaad977dee0 }
-0xffffdca97b70->Object::Object { 0xaaaad977dee0, 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xaaaad977dee0, 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'a' (0xffffdca97b70 'a')
-0xffffdca97a90->Object::Object { 0xaaaad977dee0, 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xaaaad977dee0, 0xffffdca97a90, 0xffffdca97b70 }
-Shifting token 'a' (0xffffdca97a90 'a')
-0xaaaad977df00->Object::Object { 0xaaaad977dee0, 0xffffdca97a90 }
-0xffffdca97a90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a90 }
+0xfffffd640040->Object::Object { 0xaaaad52f7ee0 }
+0xfffffd640130->Object::Object { 0xaaaad52f7ee0, 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'a' (0xfffffd640130 'a')
+0xfffffd640050->Object::Object { 0xaaaad52f7ee0, 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640050, 0xfffffd640130 }
+Shifting token 'a' (0xfffffd640050 'a')
+0xaaaad52f7f00->Object::Object { 0xaaaad52f7ee0, 0xfffffd640050 }
+0xfffffd640050->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640050 }
 Entering state 1
 Stack now 0 10 1
-0xffffdca97b90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00 }
+0xfffffd640150->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad977df00 'a')
--> $$ = nterm item (0xffffdca97b90 'a')
-0xaaaad977df00->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b90 }
-0xaaaad977df00->Object::Object { 0xaaaad977dee0, 0xffffdca97b90 }
-0xffffdca97b90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b90 }
+   $1 = token 'a' (0xaaaad52f7f00 'a')
+-> $$ = nterm item (0xfffffd640150 'a')
+0xaaaad52f7f00->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640150 }
+0xaaaad52f7f00->Object::Object { 0xaaaad52f7ee0, 0xfffffd640150 }
+0xfffffd640150->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640150 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xffffdca97a80->Object::Object { 0xaaaad977dee0, 0xaaaad977df00 }
-0xffffdca97b70->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'a' (0xffffdca97b70 'a')
-0xffffdca97a90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a90, 0xffffdca97b70 }
-Shifting token 'a' (0xffffdca97a90 'a')
-0xaaaad977df20->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a90 }
-0xffffdca97a90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a90 }
+0xfffffd640040->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00 }
+0xfffffd640130->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'a' (0xfffffd640130 'a')
+0xfffffd640050->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640050, 0xfffffd640130 }
+Shifting token 'a' (0xfffffd640050 'a')
+0xaaaad52f7f20->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640050 }
+0xfffffd640050->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640050 }
 Entering state 1
 Stack now 0 10 10 1
-0xffffdca97b90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20 }
+0xfffffd640150->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad977df20 'a')
--> $$ = nterm item (0xffffdca97b90 'a')
-0xaaaad977df20->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b90 }
-0xaaaad977df20->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b90 }
-0xffffdca97b90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b90 }
+   $1 = token 'a' (0xaaaad52f7f20 'a')
+-> $$ = nterm item (0xfffffd640150 'a')
+0xaaaad52f7f20->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640150 }
+0xaaaad52f7f20->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640150 }
+0xfffffd640150->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640150 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xffffdca97a80->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20 }
-0xffffdca97b70->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'a' (0xffffdca97b70 'a')
-0xffffdca97a90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a90, 0xffffdca97b70 }
-Shifting token 'a' (0xffffdca97a90 'a')
-0xaaaad977df40->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a90 }
-0xffffdca97a90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97a90 }
+0xfffffd640040->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20 }
+0xfffffd640130->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'a' (0xfffffd640130 'a')
+0xfffffd640050->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640050, 0xfffffd640130 }
+Shifting token 'a' (0xfffffd640050 'a')
+0xaaaad52f7f40->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640050 }
+0xfffffd640050->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640050 }
 Entering state 1
 Stack now 0 10 10 10 1
-0xffffdca97b90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40 }
+0xfffffd640150->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad977df40 'a')
--> $$ = nterm item (0xffffdca97b90 'a')
-0xaaaad977df40->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97b90 }
-0xaaaad977df40->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b90 }
-0xffffdca97b90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97b90 }
+   $1 = token 'a' (0xaaaad52f7f40 'a')
+-> $$ = nterm item (0xfffffd640150 'a')
+0xaaaad52f7f40->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640150 }
+0xaaaad52f7f40->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640150 }
+0xfffffd640150->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640150 }
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xffffdca97a80->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40 }
-0xffffdca97b70->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'p' (0xffffdca97b70 'p'Exception caught: cleaning lookahead and stack
-0xaaaad977df40->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97b70 }
-0xaaaad977df20->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b70 }
-0xaaaad977df00->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b70 }
-0xaaaad977dee0->Object::~Object { 0xaaaad977dee0, 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xffffdca97b70 }
+0xfffffd640040->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40 }
+0xfffffd640130->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'p' (0xfffffd640130 'p'Exception caught: cleaning lookahead and stack
+0xaaaad52f7f40->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640130 }
+0xaaaad52f7f20->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640130 }
+0xaaaad52f7f00->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640130 }
+0xaaaad52f7ee0->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xfffffd640130 }
 exception caught: printer
 end { }
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -269932,99 +269694,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffdca97a80->Object::Object { }
-0xffffdca97b70->Object::Object { 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'a' (0xffffdca97b70 'a')
-0xffffdca97a90->Object::Object { 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xffffdca97a90, 0xffffdca97b70 }
-Shifting token 'a' (0xffffdca97a90 'a')
-0xaaaad977dee0->Object::Object { 0xffffdca97a90 }
-0xffffdca97a90->Object::~Object { 0xaaaad977dee0, 0xffffdca97a90 }
+0xfffffd640040->Object::Object { }
+0xfffffd640130->Object::Object { 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'a' (0xfffffd640130 'a')
+0xfffffd640050->Object::Object { 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xfffffd640050, 0xfffffd640130 }
+Shifting token 'a' (0xfffffd640050 'a')
+0xaaaad52f7ee0->Object::Object { 0xfffffd640050 }
+0xfffffd640050->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640050 }
 Entering state 1
 Stack now 0 1
-0xffffdca97b90->Object::Object { 0xaaaad977dee0 }
+0xfffffd640150->Object::Object { 0xaaaad52f7ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad977dee0 'a')
--> $$ = nterm item (0xffffdca97b90 'a')
-0xaaaad977dee0->Object::~Object { 0xaaaad977dee0, 0xffffdca97b90 }
-0xaaaad977dee0->Object::Object { 0xffffdca97b90 }
-0xffffdca97b90->Object::~Object { 0xaaaad977dee0, 0xffffdca97b90 }
+   $1 = token 'a' (0xaaaad52f7ee0 'a')
+-> $$ = nterm item (0xfffffd640150 'a')
+0xaaaad52f7ee0->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640150 }
+0xaaaad52f7ee0->Object::Object { 0xfffffd640150 }
+0xfffffd640150->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640150 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xffffdca97a80->Object::Object { 0xaaaad977dee0 }
-0xffffdca97b70->Object::Object { 0xaaaad977dee0, 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xaaaad977dee0, 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'a' (0xffffdca97b70 'a')
-0xffffdca97a90->Object::Object { 0xaaaad977dee0, 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xaaaad977dee0, 0xffffdca97a90, 0xffffdca97b70 }
-Shifting token 'a' (0xffffdca97a90 'a')
-0xaaaad977df00->Object::Object { 0xaaaad977dee0, 0xffffdca97a90 }
-0xffffdca97a90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a90 }
+0xfffffd640040->Object::Object { 0xaaaad52f7ee0 }
+0xfffffd640130->Object::Object { 0xaaaad52f7ee0, 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'a' (0xfffffd640130 'a')
+0xfffffd640050->Object::Object { 0xaaaad52f7ee0, 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640050, 0xfffffd640130 }
+Shifting token 'a' (0xfffffd640050 'a')
+0xaaaad52f7f00->Object::Object { 0xaaaad52f7ee0, 0xfffffd640050 }
+0xfffffd640050->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640050 }
 Entering state 1
 Stack now 0 10 1
-0xffffdca97b90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00 }
+0xfffffd640150->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad977df00 'a')
--> $$ = nterm item (0xffffdca97b90 'a')
-0xaaaad977df00->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b90 }
-0xaaaad977df00->Object::Object { 0xaaaad977dee0, 0xffffdca97b90 }
-0xffffdca97b90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b90 }
+   $1 = token 'a' (0xaaaad52f7f00 'a')
+-> $$ = nterm item (0xfffffd640150 'a')
+0xaaaad52f7f00->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640150 }
+0xaaaad52f7f00->Object::Object { 0xaaaad52f7ee0, 0xfffffd640150 }
+0xfffffd640150->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640150 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xffffdca97a80->Object::Object { 0xaaaad977dee0, 0xaaaad977df00 }
-0xffffdca97b70->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'a' (0xffffdca97b70 'a')
-0xffffdca97a90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a90, 0xffffdca97b70 }
-Shifting token 'a' (0xffffdca97a90 'a')
-0xaaaad977df20->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97a90 }
-0xffffdca97a90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a90 }
+0xfffffd640040->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00 }
+0xfffffd640130->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'a' (0xfffffd640130 'a')
+0xfffffd640050->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640050, 0xfffffd640130 }
+Shifting token 'a' (0xfffffd640050 'a')
+0xaaaad52f7f20->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640050 }
+0xfffffd640050->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640050 }
 Entering state 1
 Stack now 0 10 10 1
-0xffffdca97b90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20 }
+0xfffffd640150->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad977df20 'a')
--> $$ = nterm item (0xffffdca97b90 'a')
-0xaaaad977df20->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b90 }
-0xaaaad977df20->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b90 }
-0xffffdca97b90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b90 }
+   $1 = token 'a' (0xaaaad52f7f20 'a')
+-> $$ = nterm item (0xfffffd640150 'a')
+0xaaaad52f7f20->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640150 }
+0xaaaad52f7f20->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640150 }
+0xfffffd640150->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640150 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xffffdca97a80->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20 }
-0xffffdca97b70->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'a' (0xffffdca97b70 'a')
-0xffffdca97a90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a90, 0xffffdca97b70 }
-Shifting token 'a' (0xffffdca97a90 'a')
-0xaaaad977df40->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97a90 }
-0xffffdca97a90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97a90 }
+0xfffffd640040->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20 }
+0xfffffd640130->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'a' (0xfffffd640130 'a')
+0xfffffd640050->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640050, 0xfffffd640130 }
+Shifting token 'a' (0xfffffd640050 'a')
+0xaaaad52f7f40->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640050 }
+0xfffffd640050->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640050 }
 Entering state 1
 Stack now 0 10 10 10 1
-0xffffdca97b90->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40 }
+0xfffffd640150->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaad977df40 'a')
--> $$ = nterm item (0xffffdca97b90 'a')
-0xaaaad977df40->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97b90 }
-0xaaaad977df40->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b90 }
-0xffffdca97b90->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97b90 }
+   $1 = token 'a' (0xaaaad52f7f40 'a')
+-> $$ = nterm item (0xfffffd640150 'a')
+0xaaaad52f7f40->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640150 }
+0xaaaad52f7f40->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640150 }
+0xfffffd640150->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640150 }
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xffffdca97a80->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40 }
-0xffffdca97b70->Object::Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97a80 }
-0xffffdca97a80->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97a80, 0xffffdca97b70 }
-Next token is token 'p' (0xffffdca97b70 'p'Exception caught: cleaning lookahead and stack
-0xaaaad977df40->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xaaaad977df40, 0xffffdca97b70 }
-0xaaaad977df20->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xaaaad977df20, 0xffffdca97b70 }
-0xaaaad977df00->Object::~Object { 0xaaaad977dee0, 0xaaaad977df00, 0xffffdca97b70 }
-0xaaaad977dee0->Object::~Object { 0xaaaad977dee0, 0xffffdca97b70 }
-0xffffdca97b70->Object::~Object { 0xffffdca97b70 }
+0xfffffd640040->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40 }
+0xfffffd640130->Object::Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640040 }
+0xfffffd640040->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640040, 0xfffffd640130 }
+Next token is token 'p' (0xfffffd640130 'p'Exception caught: cleaning lookahead and stack
+0xaaaad52f7f40->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xaaaad52f7f40, 0xfffffd640130 }
+0xaaaad52f7f20->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xaaaad52f7f20, 0xfffffd640130 }
+0xaaaad52f7f00->Object::~Object { 0xaaaad52f7ee0, 0xaaaad52f7f00, 0xfffffd640130 }
+0xaaaad52f7ee0->Object::~Object { 0xaaaad52f7ee0, 0xfffffd640130 }
+0xfffffd640130->Object::~Object { 0xfffffd640130 }
 exception caught: printer
 end { }
 ./c++.at:1363: grep '^exception caught: printer$' stderr
@@ -270035,6 +269797,7 @@
 exception caught: syntax error
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input aaaaE
+./glr-regression.at:489: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS
 stderr:
 exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -270048,31 +269811,37 @@
 ./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:489:  $PREPARSER ./glr-regr3 input.txt
+./c++.at:859:  $PREPARSER ./input
 stderr:
-./glr-regression.at:489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-721. glr-regression.at:489:  ok
-
+./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:592:  $PREPARSER ./glr-regr4
+./glr-regression.at:355:  $PREPARSER ./glr-regr2a input1.txt
 stderr:
-./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-723. glr-regression.at:592:  ok
+./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./glr-regression.at:355:  $PREPARSER ./glr-regr2a input2.txt
+stderr:
+./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./glr-regression.at:355:  $PREPARSER ./glr-regr2a input3.txt
+stderr:
+./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+718. glr-regression.at:355:  ok
 
-725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ...
-./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y
-./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS
-726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ...
-./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y
-./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS
+722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ...
+./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y
 stderr:
 stdout:
-./c++.at:859:  $PREPARSER ./input
+./glr-regression.at:488:  $PREPARSER ./glr-regr3 input.txt
 stderr:
-./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+720. glr-regression.at:488:  ok
+./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS
+
+723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ...
+./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y
+./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS
 stderr:
 stdout:
 ./glr-regression.at:207:  $PREPARSER ./glr-regr1 BPBPB
@@ -270080,100 +269849,310 @@
 ./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 716. glr-regression.at:207:  ok
 
-727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ...
-./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y
-./glr-regression.at:597: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS
+724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ...
+./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y
+./glr-regression.at:593: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:593:  $PREPARSER ./glr-regr4
+./c++.at:1362:  $PREPARSER ./input aaaas
 stderr:
-./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-724. glr-regression.at:593:  ok
-
-728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ...
-./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y
-./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS
+exception caught: reduction
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaal
 stderr:
-stdout:
-./glr-regression.at:596:  $PREPARSER ./glr-regr4
+exception caught: yylex
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input i
 stderr:
-./glr-regression.at:596: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-726. glr-regression.at:596:  ok
-
-729. glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ...
-./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y
-./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS
+exception caught: initial-action
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaap
+stderr:
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input --debug aaaap
+stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xffffc734bf30->Object::Object { }
+0xffffc734c000->Object::Object { 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'a' (0xffffc734c000 'a')
+0xffffc734bf20->Object::Object { 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xffffc734bf20, 0xffffc734c000 }
+Shifting token 'a' (0xffffc734bf20 'a')
+0xaaaae5094ee0->Object::Object { 0xffffc734bf20 }
+0xffffc734bf20->Object::~Object { 0xaaaae5094ee0, 0xffffc734bf20 }
+Entering state 2
+Stack now 0 2
+0xffffc734c020->Object::Object { 0xaaaae5094ee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae5094ee0 'a')
+-> $$ = nterm item (0xffffc734c020 'a')
+0xaaaae5094ee0->Object::~Object { 0xaaaae5094ee0, 0xffffc734c020 }
+0xaaaae5094ee0->Object::Object { 0xffffc734c020 }
+0xffffc734c020->Object::~Object { 0xaaaae5094ee0, 0xffffc734c020 }
+Entering state 11
+Stack now 0 11
+Reading a token
+0xffffc734bf30->Object::Object { 0xaaaae5094ee0 }
+0xffffc734c000->Object::Object { 0xaaaae5094ee0, 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xaaaae5094ee0, 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'a' (0xffffc734c000 'a')
+0xffffc734bf20->Object::Object { 0xaaaae5094ee0, 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xaaaae5094ee0, 0xffffc734bf20, 0xffffc734c000 }
+Shifting token 'a' (0xffffc734bf20 'a')
+0xaaaae5094f00->Object::Object { 0xaaaae5094ee0, 0xffffc734bf20 }
+0xffffc734bf20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf20 }
+Entering state 2
+Stack now 0 11 2
+0xffffc734c020->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae5094f00 'a')
+-> $$ = nterm item (0xffffc734c020 'a')
+0xaaaae5094f00->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c020 }
+0xaaaae5094f00->Object::Object { 0xaaaae5094ee0, 0xffffc734c020 }
+0xffffc734c020->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c020 }
+Entering state 11
+Stack now 0 11 11
+Reading a token
+0xffffc734bf30->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00 }
+0xffffc734c000->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'a' (0xffffc734c000 'a')
+0xffffc734bf20->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf20, 0xffffc734c000 }
+Shifting token 'a' (0xffffc734bf20 'a')
+0xaaaae5094f20->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf20 }
+0xffffc734bf20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf20 }
+Entering state 2
+Stack now 0 11 11 2
+0xffffc734c020->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae5094f20 'a')
+-> $$ = nterm item (0xffffc734c020 'a')
+0xaaaae5094f20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c020 }
+0xaaaae5094f20->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c020 }
+0xffffc734c020->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c020 }
+Entering state 11
+Stack now 0 11 11 11
+Reading a token
+0xffffc734bf30->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20 }
+0xffffc734c000->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'a' (0xffffc734c000 'a')
+0xffffc734bf20->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf20, 0xffffc734c000 }
+Shifting token 'a' (0xffffc734bf20 'a')
+0xaaaae5094f40->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf20 }
+0xffffc734bf20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734bf20 }
+Entering state 2
+Stack now 0 11 11 11 2
+0xffffc734c020->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae5094f40 'a')
+-> $$ = nterm item (0xffffc734c020 'a')
+0xaaaae5094f40->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734c020 }
+0xaaaae5094f40->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c020 }
+0xffffc734c020->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734c020 }
+Entering state 11
+Stack now 0 11 11 11 11
+Reading a token
+0xffffc734bf30->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40 }
+0xffffc734c000->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'p' (0xffffc734c000 'p'Exception caught: cleaning lookahead and stack
+0xaaaae5094f40->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734c000 }
+0xaaaae5094f20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c000 }
+0xaaaae5094f00->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c000 }
+0xaaaae5094ee0->Object::~Object { 0xaaaae5094ee0, 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xffffc734c000 }
+exception caught: printer
+end { }
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+Starting parse
+Entering state 0
+Stack now 0
+Reading a token
+0xffffc734bf30->Object::Object { }
+0xffffc734c000->Object::Object { 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'a' (0xffffc734c000 'a')
+0xffffc734bf20->Object::Object { 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xffffc734bf20, 0xffffc734c000 }
+Shifting token 'a' (0xffffc734bf20 'a')
+0xaaaae5094ee0->Object::Object { 0xffffc734bf20 }
+0xffffc734bf20->Object::~Object { 0xaaaae5094ee0, 0xffffc734bf20 }
+Entering state 2
+Stack now 0 2
+0xffffc734c020->Object::Object { 0xaaaae5094ee0 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae5094ee0 'a')
+-> $$ = nterm item (0xffffc734c020 'a')
+0xaaaae5094ee0->Object::~Object { 0xaaaae5094ee0, 0xffffc734c020 }
+0xaaaae5094ee0->Object::Object { 0xffffc734c020 }
+0xffffc734c020->Object::~Object { 0xaaaae5094ee0, 0xffffc734c020 }
+Entering state 11
+Stack now 0 11
+Reading a token
+0xffffc734bf30->Object::Object { 0xaaaae5094ee0 }
+0xffffc734c000->Object::Object { 0xaaaae5094ee0, 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xaaaae5094ee0, 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'a' (0xffffc734c000 'a')
+0xffffc734bf20->Object::Object { 0xaaaae5094ee0, 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xaaaae5094ee0, 0xffffc734bf20, 0xffffc734c000 }
+Shifting token 'a' (0xffffc734bf20 'a')
+0xaaaae5094f00->Object::Object { 0xaaaae5094ee0, 0xffffc734bf20 }
+0xffffc734bf20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf20 }
+Entering state 2
+Stack now 0 11 2
+0xffffc734c020->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae5094f00 'a')
+-> $$ = nterm item (0xffffc734c020 'a')
+0xaaaae5094f00->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c020 }
+0xaaaae5094f00->Object::Object { 0xaaaae5094ee0, 0xffffc734c020 }
+0xffffc734c020->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c020 }
+Entering state 11
+Stack now 0 11 11
+Reading a token
+0xffffc734bf30->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00 }
+0xffffc734c000->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'a' (0xffffc734c000 'a')
+0xffffc734bf20->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf20, 0xffffc734c000 }
+Shifting token 'a' (0xffffc734bf20 'a')
+0xaaaae5094f20->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734bf20 }
+0xffffc734bf20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf20 }
+Entering state 2
+Stack now 0 11 11 2
+0xffffc734c020->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae5094f20 'a')
+-> $$ = nterm item (0xffffc734c020 'a')
+0xaaaae5094f20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c020 }
+0xaaaae5094f20->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c020 }
+0xffffc734c020->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c020 }
+Entering state 11
+Stack now 0 11 11 11
+Reading a token
+0xffffc734bf30->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20 }
+0xffffc734c000->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'a' (0xffffc734c000 'a')
+0xffffc734bf20->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf20, 0xffffc734c000 }
+Shifting token 'a' (0xffffc734bf20 'a')
+0xaaaae5094f40->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734bf20 }
+0xffffc734bf20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734bf20 }
+Entering state 2
+Stack now 0 11 11 11 2
+0xffffc734c020->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40 }
+Reducing stack by rule 4 (line 142):
+   $1 = token 'a' (0xaaaae5094f40 'a')
+-> $$ = nterm item (0xffffc734c020 'a')
+0xaaaae5094f40->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734c020 }
+0xaaaae5094f40->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c020 }
+0xffffc734c020->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734c020 }
+Entering state 11
+Stack now 0 11 11 11 11
+Reading a token
+0xffffc734bf30->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40 }
+0xffffc734c000->Object::Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734bf30 }
+0xffffc734bf30->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734bf30, 0xffffc734c000 }
+Next token is token 'p' (0xffffc734c000 'p'Exception caught: cleaning lookahead and stack
+0xaaaae5094f40->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xaaaae5094f40, 0xffffc734c000 }
+0xaaaae5094f20->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xaaaae5094f20, 0xffffc734c000 }
+0xaaaae5094f00->Object::~Object { 0xaaaae5094ee0, 0xaaaae5094f00, 0xffffc734c000 }
+0xaaaae5094ee0->Object::~Object { 0xaaaae5094ee0, 0xffffc734c000 }
+0xffffc734c000->Object::~Object { 0xffffc734c000 }
+exception caught: printer
+end { }
+./c++.at:1362: grep '^exception caught: printer$' stderr
 stdout:
-./glr-regression.at:356:  $PREPARSER ./glr-regr2a input1.txt
+exception caught: printer
+./c++.at:1362:  $PREPARSER ./input aaaae
 stderr:
-./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./glr-regression.at:356:  $PREPARSER ./glr-regr2a input2.txt
+exception caught: syntax error
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaaE
 stderr:
-./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./glr-regression.at:356:  $PREPARSER ./glr-regr2a input3.txt
+exception caught: syntax error, unexpected end of file, expecting 'a'
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362:  $PREPARSER ./input aaaaT
 stderr:
-./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-719. glr-regression.at:356:  ok
-
-730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ...
-./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y
-./glr-regression.at:671: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS
 stderr:
 stdout:
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:859:  $PREPARSER ./input
 stderr:
+./c++.at:1362:  $PREPARSER ./input aaaaR
+stderr:
 ./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
 ./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+687. c++.at:1362:  ok
+
+725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ...
+./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y
+./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:670:  $PREPARSER ./glr-regr5
+./glr-regression.at:489:  $PREPARSER ./glr-regr3 input.txt
 stderr:
-Ambiguity detected.
-Option 1,
-  start -> <Rule 1, tokens 1 .. 1>
-    'a' <tokens 1 .. 1>
-
-Option 2,
-  start -> <Rule 2, tokens 1 .. 1>
-    'a' <tokens 1 .. 1>
+./glr-regression.at:489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+721. glr-regression.at:489:  ok
 
-syntax is ambiguous
-./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-729. glr-regression.at:670:  ok
+stderr:
+stdout:
+./glr-regression.at:592:  $PREPARSER ./glr-regr4
+stderr:
+./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ...
+./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y
+723. glr-regression.at:592:  ok
 
+./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS
+727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ...
+./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y
+./glr-regression.at:597: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:597:  $PREPARSER ./glr-regr4
+./glr-regression.at:356:  $PREPARSER ./glr-regr2a input1.txt
 stderr:
-./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ...
-./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y
-727. glr-regression.at:597:  ok
+./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./glr-regression.at:356:  $PREPARSER ./glr-regr2a input2.txt
+stderr:
+./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./glr-regression.at:356:  $PREPARSER ./glr-regr2a input3.txt
+stderr:
+./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+719. glr-regression.at:356:  ok
 
-./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS
-732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ...
-./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y
-./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS
+728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ...
+./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y
+./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:490:  $PREPARSER ./glr-regr3 input.txt
+./glr-regression.at:596:  $PREPARSER ./glr-regr4
 stderr:
-./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-722. glr-regression.at:490:  ok
+./glr-regression.at:596: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+726. glr-regression.at:596:  ok
 
-733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ...
-./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y
 stderr:
 stdout:
 ./c++.at:1363:  $PREPARSER ./input aaaas
 stderr:
-./glr-regression.at:739: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS
 exception caught: reduction
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input aaaal
 stderr:
+729. glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ...
+./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y
 exception caught: yylex
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input i
@@ -270182,6 +270161,7 @@
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input aaaap
 stderr:
+./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input --debug aaaap
 stderr:
@@ -270189,99 +270169,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd7c1a7f0->Object::Object { }
-0xffffd7c1a8e0->Object::Object { 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'a' (0xffffd7c1a8e0 'a')
-0xffffd7c1a800->Object::Object { 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xffffd7c1a800, 0xffffd7c1a8e0 }
-Shifting token 'a' (0xffffd7c1a800 'a')
-0xaaaac12f1ee0->Object::Object { 0xffffd7c1a800 }
-0xffffd7c1a800->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a800 }
+0xfffff206ee80->Object::Object { }
+0xfffff206ef70->Object::Object { 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'a' (0xfffff206ef70 'a')
+0xfffff206ee90->Object::Object { 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xfffff206ee90, 0xfffff206ef70 }
+Shifting token 'a' (0xfffff206ee90 'a')
+0xaaaaccfe8ee0->Object::Object { 0xfffff206ee90 }
+0xfffff206ee90->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ee90 }
 Entering state 1
 Stack now 0 1
-0xffffd7c1a900->Object::Object { 0xaaaac12f1ee0 }
+0xfffff206ef90->Object::Object { 0xaaaaccfe8ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac12f1ee0 'a')
--> $$ = nterm item (0xffffd7c1a900 'a')
-0xaaaac12f1ee0->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a900 }
-0xaaaac12f1ee0->Object::Object { 0xffffd7c1a900 }
-0xffffd7c1a900->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a900 }
+   $1 = token 'a' (0xaaaaccfe8ee0 'a')
+-> $$ = nterm item (0xfffff206ef90 'a')
+0xaaaaccfe8ee0->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ef90 }
+0xaaaaccfe8ee0->Object::Object { 0xfffff206ef90 }
+0xfffff206ef90->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ef90 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xffffd7c1a7f0->Object::Object { 0xaaaac12f1ee0 }
-0xffffd7c1a8e0->Object::Object { 0xaaaac12f1ee0, 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'a' (0xffffd7c1a8e0 'a')
-0xffffd7c1a800->Object::Object { 0xaaaac12f1ee0, 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a800, 0xffffd7c1a8e0 }
-Shifting token 'a' (0xffffd7c1a800 'a')
-0xaaaac12f1f00->Object::Object { 0xaaaac12f1ee0, 0xffffd7c1a800 }
-0xffffd7c1a800->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a800 }
+0xfffff206ee80->Object::Object { 0xaaaaccfe8ee0 }
+0xfffff206ef70->Object::Object { 0xaaaaccfe8ee0, 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'a' (0xfffff206ef70 'a')
+0xfffff206ee90->Object::Object { 0xaaaaccfe8ee0, 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ee90, 0xfffff206ef70 }
+Shifting token 'a' (0xfffff206ee90 'a')
+0xaaaaccfe8f00->Object::Object { 0xaaaaccfe8ee0, 0xfffff206ee90 }
+0xfffff206ee90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee90 }
 Entering state 1
 Stack now 0 10 1
-0xffffd7c1a900->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00 }
+0xfffff206ef90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac12f1f00 'a')
--> $$ = nterm item (0xffffd7c1a900 'a')
-0xaaaac12f1f00->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a900 }
-0xaaaac12f1f00->Object::Object { 0xaaaac12f1ee0, 0xffffd7c1a900 }
-0xffffd7c1a900->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a900 }
+   $1 = token 'a' (0xaaaaccfe8f00 'a')
+-> $$ = nterm item (0xfffff206ef90 'a')
+0xaaaaccfe8f00->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef90 }
+0xaaaaccfe8f00->Object::Object { 0xaaaaccfe8ee0, 0xfffff206ef90 }
+0xfffff206ef90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef90 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xffffd7c1a7f0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00 }
-0xffffd7c1a8e0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'a' (0xffffd7c1a8e0 'a')
-0xffffd7c1a800->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a800, 0xffffd7c1a8e0 }
-Shifting token 'a' (0xffffd7c1a800 'a')
-0xaaaac12f1f20->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a800 }
-0xffffd7c1a800->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a800 }
+0xfffff206ee80->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00 }
+0xfffff206ef70->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'a' (0xfffff206ef70 'a')
+0xfffff206ee90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee90, 0xfffff206ef70 }
+Shifting token 'a' (0xfffff206ee90 'a')
+0xaaaaccfe8f20->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee90 }
+0xfffff206ee90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee90 }
 Entering state 1
 Stack now 0 10 10 1
-0xffffd7c1a900->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20 }
+0xfffff206ef90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac12f1f20 'a')
--> $$ = nterm item (0xffffd7c1a900 'a')
-0xaaaac12f1f20->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a900 }
-0xaaaac12f1f20->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a900 }
-0xffffd7c1a900->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a900 }
+   $1 = token 'a' (0xaaaaccfe8f20 'a')
+-> $$ = nterm item (0xfffff206ef90 'a')
+0xaaaaccfe8f20->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef90 }
+0xaaaaccfe8f20->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef90 }
+0xfffff206ef90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef90 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xffffd7c1a7f0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20 }
-0xffffd7c1a8e0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'a' (0xffffd7c1a8e0 'a')
-0xffffd7c1a800->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a800, 0xffffd7c1a8e0 }
-Shifting token 'a' (0xffffd7c1a800 'a')
-0xaaaac12f1f40->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a800 }
-0xffffd7c1a800->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a800 }
+0xfffff206ee80->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20 }
+0xfffff206ef70->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'a' (0xfffff206ef70 'a')
+0xfffff206ee90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee90, 0xfffff206ef70 }
+Shifting token 'a' (0xfffff206ee90 'a')
+0xaaaaccfe8f40->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee90 }
+0xfffff206ee90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ee90 }
 Entering state 1
 Stack now 0 10 10 10 1
-0xffffd7c1a900->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40 }
+0xfffff206ef90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac12f1f40 'a')
--> $$ = nterm item (0xffffd7c1a900 'a')
-0xaaaac12f1f40->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a900 }
-0xaaaac12f1f40->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a900 }
-0xffffd7c1a900->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a900 }
+   $1 = token 'a' (0xaaaaccfe8f40 'a')
+-> $$ = nterm item (0xfffff206ef90 'a')
+0xaaaaccfe8f40->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ef90 }
+0xaaaaccfe8f40->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef90 }
+0xfffff206ef90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ef90 }
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xffffd7c1a7f0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40 }
-0xffffd7c1a8e0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'p' (0xffffd7c1a8e0 'p'Exception caught: cleaning lookahead and stack
-0xaaaac12f1f40->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a8e0 }
-0xaaaac12f1f20->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a8e0 }
-0xaaaac12f1f00->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a8e0 }
-0xaaaac12f1ee0->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xffffd7c1a8e0 }
+0xfffff206ee80->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40 }
+0xfffff206ef70->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'p' (0xfffff206ef70 'p'Exception caught: cleaning lookahead and stack
+0xaaaaccfe8f40->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ef70 }
+0xaaaaccfe8f20->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef70 }
+0xaaaaccfe8f00->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef70 }
+0xaaaaccfe8ee0->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xfffff206ef70 }
 exception caught: printer
 end { }
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -270290,99 +270270,99 @@
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd7c1a7f0->Object::Object { }
-0xffffd7c1a8e0->Object::Object { 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'a' (0xffffd7c1a8e0 'a')
-0xffffd7c1a800->Object::Object { 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xffffd7c1a800, 0xffffd7c1a8e0 }
-Shifting token 'a' (0xffffd7c1a800 'a')
-0xaaaac12f1ee0->Object::Object { 0xffffd7c1a800 }
-0xffffd7c1a800->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a800 }
+0xfffff206ee80->Object::Object { }
+0xfffff206ef70->Object::Object { 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'a' (0xfffff206ef70 'a')
+0xfffff206ee90->Object::Object { 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xfffff206ee90, 0xfffff206ef70 }
+Shifting token 'a' (0xfffff206ee90 'a')
+0xaaaaccfe8ee0->Object::Object { 0xfffff206ee90 }
+0xfffff206ee90->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ee90 }
 Entering state 1
 Stack now 0 1
-0xffffd7c1a900->Object::Object { 0xaaaac12f1ee0 }
+0xfffff206ef90->Object::Object { 0xaaaaccfe8ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac12f1ee0 'a')
--> $$ = nterm item (0xffffd7c1a900 'a')
-0xaaaac12f1ee0->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a900 }
-0xaaaac12f1ee0->Object::Object { 0xffffd7c1a900 }
-0xffffd7c1a900->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a900 }
+   $1 = token 'a' (0xaaaaccfe8ee0 'a')
+-> $$ = nterm item (0xfffff206ef90 'a')
+0xaaaaccfe8ee0->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ef90 }
+0xaaaaccfe8ee0->Object::Object { 0xfffff206ef90 }
+0xfffff206ef90->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ef90 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xffffd7c1a7f0->Object::Object { 0xaaaac12f1ee0 }
-0xffffd7c1a8e0->Object::Object { 0xaaaac12f1ee0, 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'a' (0xffffd7c1a8e0 'a')
-0xffffd7c1a800->Object::Object { 0xaaaac12f1ee0, 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a800, 0xffffd7c1a8e0 }
-Shifting token 'a' (0xffffd7c1a800 'a')
-0xaaaac12f1f00->Object::Object { 0xaaaac12f1ee0, 0xffffd7c1a800 }
-0xffffd7c1a800->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a800 }
+0xfffff206ee80->Object::Object { 0xaaaaccfe8ee0 }
+0xfffff206ef70->Object::Object { 0xaaaaccfe8ee0, 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'a' (0xfffff206ef70 'a')
+0xfffff206ee90->Object::Object { 0xaaaaccfe8ee0, 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ee90, 0xfffff206ef70 }
+Shifting token 'a' (0xfffff206ee90 'a')
+0xaaaaccfe8f00->Object::Object { 0xaaaaccfe8ee0, 0xfffff206ee90 }
+0xfffff206ee90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee90 }
 Entering state 1
 Stack now 0 10 1
-0xffffd7c1a900->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00 }
+0xfffff206ef90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac12f1f00 'a')
--> $$ = nterm item (0xffffd7c1a900 'a')
-0xaaaac12f1f00->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a900 }
-0xaaaac12f1f00->Object::Object { 0xaaaac12f1ee0, 0xffffd7c1a900 }
-0xffffd7c1a900->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a900 }
+   $1 = token 'a' (0xaaaaccfe8f00 'a')
+-> $$ = nterm item (0xfffff206ef90 'a')
+0xaaaaccfe8f00->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef90 }
+0xaaaaccfe8f00->Object::Object { 0xaaaaccfe8ee0, 0xfffff206ef90 }
+0xfffff206ef90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef90 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xffffd7c1a7f0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00 }
-0xffffd7c1a8e0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'a' (0xffffd7c1a8e0 'a')
-0xffffd7c1a800->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a800, 0xffffd7c1a8e0 }
-Shifting token 'a' (0xffffd7c1a800 'a')
-0xaaaac12f1f20->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a800 }
-0xffffd7c1a800->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a800 }
+0xfffff206ee80->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00 }
+0xfffff206ef70->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'a' (0xfffff206ef70 'a')
+0xfffff206ee90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee90, 0xfffff206ef70 }
+Shifting token 'a' (0xfffff206ee90 'a')
+0xaaaaccfe8f20->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ee90 }
+0xfffff206ee90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee90 }
 Entering state 1
 Stack now 0 10 10 1
-0xffffd7c1a900->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20 }
+0xfffff206ef90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac12f1f20 'a')
--> $$ = nterm item (0xffffd7c1a900 'a')
-0xaaaac12f1f20->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a900 }
-0xaaaac12f1f20->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a900 }
-0xffffd7c1a900->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a900 }
+   $1 = token 'a' (0xaaaaccfe8f20 'a')
+-> $$ = nterm item (0xfffff206ef90 'a')
+0xaaaaccfe8f20->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef90 }
+0xaaaaccfe8f20->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef90 }
+0xfffff206ef90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef90 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xffffd7c1a7f0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20 }
-0xffffd7c1a8e0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'a' (0xffffd7c1a8e0 'a')
-0xffffd7c1a800->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a800, 0xffffd7c1a8e0 }
-Shifting token 'a' (0xffffd7c1a800 'a')
-0xaaaac12f1f40->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a800 }
-0xffffd7c1a800->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a800 }
+0xfffff206ee80->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20 }
+0xfffff206ef70->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'a' (0xfffff206ef70 'a')
+0xfffff206ee90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee90, 0xfffff206ef70 }
+Shifting token 'a' (0xfffff206ee90 'a')
+0xaaaaccfe8f40->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ee90 }
+0xfffff206ee90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ee90 }
 Entering state 1
 Stack now 0 10 10 10 1
-0xffffd7c1a900->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40 }
+0xfffff206ef90->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaaac12f1f40 'a')
--> $$ = nterm item (0xffffd7c1a900 'a')
-0xaaaac12f1f40->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a900 }
-0xaaaac12f1f40->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a900 }
-0xffffd7c1a900->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a900 }
+   $1 = token 'a' (0xaaaaccfe8f40 'a')
+-> $$ = nterm item (0xfffff206ef90 'a')
+0xaaaaccfe8f40->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ef90 }
+0xaaaaccfe8f40->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef90 }
+0xfffff206ef90->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ef90 }
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xffffd7c1a7f0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40 }
-0xffffd7c1a8e0->Object::Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a7f0 }
-0xffffd7c1a7f0->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a7f0, 0xffffd7c1a8e0 }
-Next token is token 'p' (0xffffd7c1a8e0 'p'Exception caught: cleaning lookahead and stack
-0xaaaac12f1f40->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xaaaac12f1f40, 0xffffd7c1a8e0 }
-0xaaaac12f1f20->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xaaaac12f1f20, 0xffffd7c1a8e0 }
-0xaaaac12f1f00->Object::~Object { 0xaaaac12f1ee0, 0xaaaac12f1f00, 0xffffd7c1a8e0 }
-0xaaaac12f1ee0->Object::~Object { 0xaaaac12f1ee0, 0xffffd7c1a8e0 }
-0xffffd7c1a8e0->Object::~Object { 0xffffd7c1a8e0 }
+0xfffff206ee80->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40 }
+0xfffff206ef70->Object::Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ee80 }
+0xfffff206ee80->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ee80, 0xfffff206ef70 }
+Next token is token 'p' (0xfffff206ef70 'p'Exception caught: cleaning lookahead and stack
+0xaaaaccfe8f40->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xaaaaccfe8f40, 0xfffff206ef70 }
+0xaaaaccfe8f20->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xaaaaccfe8f20, 0xfffff206ef70 }
+0xaaaaccfe8f00->Object::~Object { 0xaaaaccfe8ee0, 0xaaaaccfe8f00, 0xfffff206ef70 }
+0xaaaaccfe8ee0->Object::~Object { 0xaaaaccfe8ee0, 0xfffff206ef70 }
+0xfffff206ef70->Object::~Object { 0xfffff206ef70 }
 exception caught: printer
 end { }
 ./c++.at:1363: grep '^exception caught: printer$' stderr
@@ -270394,19 +270374,53 @@
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input aaaaE
 stderr:
+stderr:
+stdout:
 exception caught: syntax error, unexpected end of file, expecting 'a'
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./glr-regression.at:593:  $PREPARSER ./glr-regr4
+stderr:
 ./c++.at:1363:  $PREPARSER ./input aaaaT
+./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+724. glr-regression.at:593:  ok
 ./c++.at:1363:  $PREPARSER ./input aaaaR
 stderr:
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ======== Testing with C++ standard flags: ''
+
 ./c++.at:1363: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ...
+./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y
+./glr-regression.at:671: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:671:  $PREPARSER ./glr-regr5
+./c++.at:859:  $PREPARSER ./input
+stderr:
+./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:859: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./glr-regression.at:597:  $PREPARSER ./glr-regr4
+stderr:
+./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+727. glr-regression.at:597:  ok
+
+731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ...
+./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y
+./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS
+stderr:
+stdout:
+./glr-regression.at:490:  $PREPARSER ./glr-regr3 input.txt
+stderr:
+./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+722. glr-regression.at:490:  ok
+stderr:
+
+stdout:
+./glr-regression.at:670:  $PREPARSER ./glr-regr5
 stderr:
 Ambiguity detected.
 Option 1,
@@ -270418,15 +270432,25 @@
     'a' <tokens 1 .. 1>
 
 syntax is ambiguous
-./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-730. glr-regression.at:671:  ok
+./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+729. glr-regression.at:670:  ok
 
+732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ...
+./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y
+./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS
+733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ...
+./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y
+./glr-regression.at:739: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:738:  $PREPARSER ./glr-regr6
+./glr-regression.at:594:  $PREPARSER ./glr-regr4
+stderr:
+./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+725. glr-regression.at:594:  ok
+stdout:
+./glr-regression.at:671:  $PREPARSER ./glr-regr5
 stderr:
-734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ...
-./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y
 Ambiguity detected.
 Option 1,
   start -> <Rule 1, tokens 1 .. 1>
@@ -270437,33 +270461,56 @@
     'a' <tokens 1 .. 1>
 
 syntax is ambiguous
-./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-732. glr-regression.at:738:  ok
+./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+730. glr-regression.at:671:  ok
 
-./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS
-735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ...
-./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y
-./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS
 stderr:
+
 stdout:
 ./c++.at:859:  $PREPARSER ./input
 stderr:
 ./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -o input.cc input.yy
+734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ...
+./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y
+735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ...
+./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y
 ======== Testing with C++ standard flags: ''
 ./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS
+./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:594:  $PREPARSER ./glr-regr4
+./glr-regression.at:738:  $PREPARSER ./glr-regr6
 stderr:
-./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-725. glr-regression.at:594:  ok
+Ambiguity detected.
+Option 1,
+  start -> <Rule 1, tokens 1 .. 1>
+    'a' <tokens 1 .. 1>
+
+Option 2,
+  start -> <Rule 2, tokens 1 .. 1>
+    'a' <tokens 1 .. 1>
+
+syntax is ambiguous
+./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+732. glr-regression.at:738:  ok
 
 736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ...
 ./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y
 ./glr-regression.at:844: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS
 stderr:
 stdout:
+./glr-regression.at:843:  $PREPARSER ./glr-regr7
+stderr:
+memory exhausted
+./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+735. glr-regression.at:843:  ok
+
+stderr:
+stdout:
+737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ...
+./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y
 ./glr-regression.at:739:  $PREPARSER ./glr-regr6
 stderr:
 Ambiguity detected.
@@ -270478,17 +270525,7 @@
 syntax is ambiguous
 ./glr-regression.at:739: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 733. glr-regression.at:739:  ok
-
-737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ...
-./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y
 ./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS
-stderr:
-stdout:
-./glr-regression.at:843:  $PREPARSER ./glr-regr7
-stderr:
-memory exhausted
-./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-735. glr-regression.at:843:  ok
 
 738. glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ...
 ./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y
@@ -270505,34 +270542,6 @@
 ./glr-regression.at:945: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS
 stderr:
 stdout:
-./c++.at:860:  $PREPARSER ./input
-stderr:
-./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./glr-regression.at:944:  $PREPARSER ./glr-regr8
-stderr:
-./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-738. glr-regression.at:944:  ok
-
-740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ...
-./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y
-./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS
-stderr:
-stdout:
-./glr-regression.at:844:  $PREPARSER ./glr-regr7
-stderr:
-memory exhausted
-./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-736. glr-regression.at:844:  ok
-
-741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ...
-./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y
-./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS
-stderr:
-stdout:
 ./c++.at:1363:  $PREPARSER ./input aaaas
 stderr:
 exception caught: reduction
@@ -270550,210 +270559,217 @@
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./c++.at:1363:  $PREPARSER ./input --debug aaaap
 stderr:
+stdout:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd3f45b00->Object::Object { }
-0xffffd3f45bf0->Object::Object { 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'a' (0xffffd3f45bf0 'a')
-0xffffd3f45b10->Object::Object { 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xffffd3f45b10, 0xffffd3f45bf0 }
-Shifting token 'a' (0xffffd3f45b10 'a')
-0xaaab108b0ee0->Object::Object { 0xffffd3f45b10 }
-0xffffd3f45b10->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45b10 }
+0xffffc3263b70->Object::Object { }
+0xffffc3263c60->Object::Object { 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'a' (0xffffc3263c60 'a')
+0xffffc3263b80->Object::Object { 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xffffc3263b80, 0xffffc3263c60 }
+Shifting token 'a' (0xffffc3263b80 'a')
+0xaaaaf4466ee0->Object::Object { 0xffffc3263b80 }
+0xffffc3263b80->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263b80 }
 Entering state 1
 Stack now 0 1
-0xffffd3f45c10->Object::Object { 0xaaab108b0ee0 }
+0xffffc3263c80->Object::Object { 0xaaaaf4466ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab108b0ee0 'a')
--> $$ = nterm item (0xffffd3f45c10 'a')
-0xaaab108b0ee0->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45c10 }
-0xaaab108b0ee0->Object::Object { 0xffffd3f45c10 }
-0xffffd3f45c10->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45c10 }
+   $1 = token 'a' (0xaaaaf4466ee0 'a')
+-> $$ = nterm item (0xffffc3263c80 'a')
+0xaaaaf4466ee0->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263c80 }
+0xaaaaf4466ee0->Object::Object { 0xffffc3263c80 }
+0xffffc3263c80->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263c80 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xffffd3f45b00->Object::Object { 0xaaab108b0ee0 }
-0xffffd3f45bf0->Object::Object { 0xaaab108b0ee0, 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'a' (0xffffd3f45bf0 'a')
-0xffffd3f45b10->Object::Object { 0xaaab108b0ee0, 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45b10, 0xffffd3f45bf0 }
-Shifting token 'a' (0xffffd3f45b10 'a')
-0xaaab108b0f00->Object::Object { 0xaaab108b0ee0, 0xffffd3f45b10 }
-0xffffd3f45b10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b10 }
+0xffffc3263b70->Object::Object { 0xaaaaf4466ee0 }
+0xffffc3263c60->Object::Object { 0xaaaaf4466ee0, 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'a' (0xffffc3263c60 'a')
+0xffffc3263b80->Object::Object { 0xaaaaf4466ee0, 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263b80, 0xffffc3263c60 }
+Shifting token 'a' (0xffffc3263b80 'a')
+0xaaaaf4466f00->Object::Object { 0xaaaaf4466ee0, 0xffffc3263b80 }
+0xffffc3263b80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b80 }
 Entering state 1
 Stack now 0 10 1
-0xffffd3f45c10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00 }
+0xffffc3263c80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab108b0f00 'a')
--> $$ = nterm item (0xffffd3f45c10 'a')
-0xaaab108b0f00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45c10 }
-0xaaab108b0f00->Object::Object { 0xaaab108b0ee0, 0xffffd3f45c10 }
-0xffffd3f45c10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45c10 }
+   $1 = token 'a' (0xaaaaf4466f00 'a')
+-> $$ = nterm item (0xffffc3263c80 'a')
+0xaaaaf4466f00->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c80 }
+0xaaaaf4466f00->Object::Object { 0xaaaaf4466ee0, 0xffffc3263c80 }
+0xffffc3263c80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c80 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xffffd3f45b00->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00 }
-0xffffd3f45bf0->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'a' (0xffffd3f45bf0 'a')
-0xffffd3f45b10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b10, 0xffffd3f45bf0 }
-Shifting token 'a' (0xffffd3f45b10 'a')
-0xaaab108b0f20->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b10 }
-0xffffd3f45b10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b10 }
+0xffffc3263b70->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00 }
+0xffffc3263c60->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'a' (0xffffc3263c60 'a')
+0xffffc3263b80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b80, 0xffffc3263c60 }
+Shifting token 'a' (0xffffc3263b80 'a')
+0xaaaaf4466f20->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b80 }
+0xffffc3263b80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b80 }
 Entering state 1
 Stack now 0 10 10 1
-0xffffd3f45c10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20 }
+0xffffc3263c80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab108b0f20 'a')
--> $$ = nterm item (0xffffd3f45c10 'a')
-0xaaab108b0f20->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45c10 }
-0xaaab108b0f20->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45c10 }
-0xffffd3f45c10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45c10 }
+   $1 = token 'a' (0xaaaaf4466f20 'a')
+-> $$ = nterm item (0xffffc3263c80 'a')
+0xaaaaf4466f20->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c80 }
+0xaaaaf4466f20->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c80 }
+0xffffc3263c80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c80 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xffffd3f45b00->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20 }
-0xffffd3f45bf0->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'a' (0xffffd3f45bf0 'a')
-0xffffd3f45b10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b10, 0xffffd3f45bf0 }
-Shifting token 'a' (0xffffd3f45b10 'a')
-0xaaab108b0f40->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b10 }
-0xffffd3f45b10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45b10 }
+0xffffc3263b70->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20 }
+0xffffc3263c60->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'a' (0xffffc3263c60 'a')
+0xffffc3263b80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b80, 0xffffc3263c60 }
+Shifting token 'a' (0xffffc3263b80 'a')
+0xaaaaf4466f40->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b80 }
+0xffffc3263b80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263b80 }
 Entering state 1
 Stack now 0 10 10 10 1
-0xffffd3f45c10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40 }
+0xffffc3263c80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab108b0f40 'a')
--> $$ = nterm item (0xffffd3f45c10 'a')
-0xaaab108b0f40->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45c10 }
-0xaaab108b0f40->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45c10 }
-0xffffd3f45c10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45c10 }
+   $1 = token 'a' (0xaaaaf4466f40 'a')
+-> $$ = nterm item (0xffffc3263c80 'a')
+0xaaaaf4466f40->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263c80 }
+0xaaaaf4466f40->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c80 }
+0xffffc3263c80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263c80 }
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xffffd3f45b00->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40 }
-0xffffd3f45bf0->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'p' (0xffffd3f45bf0 'p'Exception caught: cleaning lookahead and stack
-0xaaab108b0f40->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45bf0 }
-0xaaab108b0f20->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45bf0 }
-0xaaab108b0f00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45bf0 }
-0xaaab108b0ee0->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xffffd3f45bf0 }
+0xffffc3263b70->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40 }
+0xffffc3263c60->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'p' (0xffffc3263c60 'p'Exception caught: cleaning lookahead and stack
+0xaaaaf4466f40->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263c60 }
+0xaaaaf4466f20->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c60 }
+0xaaaaf4466f00->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c60 }
+0xaaaaf4466ee0->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xffffc3263c60 }
 exception caught: printer
 end { }
+./c++.at:860:  $PREPARSER ./input
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stderr:
+stderr:
 Starting parse
 Entering state 0
 Stack now 0
 Reading a token
-0xffffd3f45b00->Object::Object { }
-0xffffd3f45bf0->Object::Object { 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'a' (0xffffd3f45bf0 'a')
-0xffffd3f45b10->Object::Object { 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xffffd3f45b10, 0xffffd3f45bf0 }
-Shifting token 'a' (0xffffd3f45b10 'a')
-0xaaab108b0ee0->Object::Object { 0xffffd3f45b10 }
-0xffffd3f45b10->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45b10 }
+0xffffc3263b70->Object::Object { }
+0xffffc3263c60->Object::Object { 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'a' (0xffffc3263c60 'a')
+0xffffc3263b80->Object::Object { 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xffffc3263b80, 0xffffc3263c60 }
+Shifting token 'a' (0xffffc3263b80 'a')
+0xaaaaf4466ee0->Object::Object { 0xffffc3263b80 }
+0xffffc3263b80->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263b80 }
 Entering state 1
 Stack now 0 1
-0xffffd3f45c10->Object::Object { 0xaaab108b0ee0 }
+0xffffc3263c80->Object::Object { 0xaaaaf4466ee0 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab108b0ee0 'a')
--> $$ = nterm item (0xffffd3f45c10 'a')
-0xaaab108b0ee0->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45c10 }
-0xaaab108b0ee0->Object::Object { 0xffffd3f45c10 }
-0xffffd3f45c10->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45c10 }
+   $1 = token 'a' (0xaaaaf4466ee0 'a')
+-> $$ = nterm item (0xffffc3263c80 'a')
+0xaaaaf4466ee0->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263c80 }
+0xaaaaf4466ee0->Object::Object { 0xffffc3263c80 }
+0xffffc3263c80->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263c80 }
 Entering state 10
 Stack now 0 10
 Reading a token
-0xffffd3f45b00->Object::Object { 0xaaab108b0ee0 }
-0xffffd3f45bf0->Object::Object { 0xaaab108b0ee0, 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'a' (0xffffd3f45bf0 'a')
-0xffffd3f45b10->Object::Object { 0xaaab108b0ee0, 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45b10, 0xffffd3f45bf0 }
-Shifting token 'a' (0xffffd3f45b10 'a')
-0xaaab108b0f00->Object::Object { 0xaaab108b0ee0, 0xffffd3f45b10 }
-0xffffd3f45b10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b10 }
+0xffffc3263b70->Object::Object { 0xaaaaf4466ee0 }
+0xffffc3263c60->Object::Object { 0xaaaaf4466ee0, 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'a' (0xffffc3263c60 'a')
+0xffffc3263b80->Object::Object { 0xaaaaf4466ee0, 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263b80, 0xffffc3263c60 }
+Shifting token 'a' (0xffffc3263b80 'a')
+0xaaaaf4466f00->Object::Object { 0xaaaaf4466ee0, 0xffffc3263b80 }
+0xffffc3263b80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b80 }
 Entering state 1
 Stack now 0 10 1
-0xffffd3f45c10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00 }
+0xffffc3263c80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab108b0f00 'a')
--> $$ = nterm item (0xffffd3f45c10 'a')
-0xaaab108b0f00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45c10 }
-0xaaab108b0f00->Object::Object { 0xaaab108b0ee0, 0xffffd3f45c10 }
-0xffffd3f45c10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45c10 }
+   $1 = token 'a' (0xaaaaf4466f00 'a')
+-> $$ = nterm item (0xffffc3263c80 'a')
+0xaaaaf4466f00->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c80 }
+0xaaaaf4466f00->Object::Object { 0xaaaaf4466ee0, 0xffffc3263c80 }
+0xffffc3263c80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c80 }
 Entering state 10
 Stack now 0 10 10
 Reading a token
-0xffffd3f45b00->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00 }
-0xffffd3f45bf0->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'a' (0xffffd3f45bf0 'a')
-0xffffd3f45b10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b10, 0xffffd3f45bf0 }
-Shifting token 'a' (0xffffd3f45b10 'a')
-0xaaab108b0f20->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45b10 }
-0xffffd3f45b10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b10 }
+0xffffc3263b70->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00 }
+0xffffc3263c60->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'a' (0xffffc3263c60 'a')
+0xffffc3263b80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b80, 0xffffc3263c60 }
+Shifting token 'a' (0xffffc3263b80 'a')
+0xaaaaf4466f20->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263b80 }
+0xffffc3263b80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b80 }
 Entering state 1
 Stack now 0 10 10 1
-0xffffd3f45c10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20 }
+0xffffc3263c80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab108b0f20 'a')
--> $$ = nterm item (0xffffd3f45c10 'a')
-0xaaab108b0f20->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45c10 }
-0xaaab108b0f20->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45c10 }
-0xffffd3f45c10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45c10 }
+   $1 = token 'a' (0xaaaaf4466f20 'a')
+-> $$ = nterm item (0xffffc3263c80 'a')
+0xaaaaf4466f20->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c80 }
+0xaaaaf4466f20->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c80 }
+0xffffc3263c80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c80 }
 Entering state 10
 Stack now 0 10 10 10
 Reading a token
-0xffffd3f45b00->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20 }
-0xffffd3f45bf0->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'a' (0xffffd3f45bf0 'a')
-0xffffd3f45b10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b10, 0xffffd3f45bf0 }
-Shifting token 'a' (0xffffd3f45b10 'a')
-0xaaab108b0f40->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45b10 }
-0xffffd3f45b10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45b10 }
+0xffffc3263b70->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20 }
+0xffffc3263c60->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'a' (0xffffc3263c60 'a')
+0xffffc3263b80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b80, 0xffffc3263c60 }
+Shifting token 'a' (0xffffc3263b80 'a')
+0xaaaaf4466f40->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263b80 }
+0xffffc3263b80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263b80 }
 Entering state 1
 Stack now 0 10 10 10 1
-0xffffd3f45c10->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40 }
+0xffffc3263c80->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40 }
 Reducing stack by rule 4 (line 142):
-   $1 = token 'a' (0xaaab108b0f40 'a')
--> $$ = nterm item (0xffffd3f45c10 'a')
-0xaaab108b0f40->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45c10 }
-0xaaab108b0f40->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45c10 }
-0xffffd3f45c10->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45c10 }
+   $1 = token 'a' (0xaaaaf4466f40 'a')
+-> $$ = nterm item (0xffffc3263c80 'a')
+0xaaaaf4466f40->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263c80 }
+0xaaaaf4466f40->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c80 }
+0xffffc3263c80->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263c80 }
 Entering state 10
 Stack now 0 10 10 10 10
 Reading a token
-0xffffd3f45b00->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40 }
-0xffffd3f45bf0->Object::Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45b00 }
-0xffffd3f45b00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45b00, 0xffffd3f45bf0 }
-Next token is token 'p' (0xffffd3f45bf0 'p'Exception caught: cleaning lookahead and stack
-0xaaab108b0f40->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xaaab108b0f40, 0xffffd3f45bf0 }
-0xaaab108b0f20->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xaaab108b0f20, 0xffffd3f45bf0 }
-0xaaab108b0f00->Object::~Object { 0xaaab108b0ee0, 0xaaab108b0f00, 0xffffd3f45bf0 }
-0xaaab108b0ee0->Object::~Object { 0xaaab108b0ee0, 0xffffd3f45bf0 }
-0xffffd3f45bf0->Object::~Object { 0xffffd3f45bf0 }
+0xffffc3263b70->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40 }
+0xffffc3263c60->Object::Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263b70 }
+0xffffc3263b70->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263b70, 0xffffc3263c60 }
+Next token is token 'p' (0xffffc3263c60 'p'Exception caught: cleaning lookahead and stack
+0xaaaaf4466f40->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xaaaaf4466f40, 0xffffc3263c60 }
+0xaaaaf4466f20->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xaaaaf4466f20, 0xffffc3263c60 }
+0xaaaaf4466f00->Object::~Object { 0xaaaaf4466ee0, 0xaaaaf4466f00, 0xffffc3263c60 }
+0xaaaaf4466ee0->Object::~Object { 0xaaaaf4466ee0, 0xffffc3263c60 }
+0xffffc3263c60->Object::~Object { 0xffffc3263c60 }
 exception caught: printer
 end { }
 ./c++.at:1363: grep '^exception caught: printer$' stderr
+./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 stdout:
 exception caught: printer
 ./c++.at:1363:  $PREPARSER ./input aaaae
+======== Testing with C++ standard flags: ''
+./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 exception caught: syntax error
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
@@ -270769,18 +270785,11 @@
 ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 688. c++.at:1363:  ok
 
+740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ...
+./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y
+./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS
 stderr:
 stdout:
-./c++.at:860:  $PREPARSER ./input
-stderr:
-./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ...
-./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y
-stderr:
-./glr-regression.at:1037: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS
-stdout:
 ./glr-regression.at:672:  $PREPARSER ./glr-regr5
 stderr:
 Ambiguity detected.
@@ -270796,20 +270805,37 @@
 ./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 731. glr-regression.at:672:  ok
 
+stderr:
+stdout:
+./glr-regression.at:844:  $PREPARSER ./glr-regr7
+stderr:
+memory exhausted
+./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
+./glr-regression.at:944:  $PREPARSER ./glr-regr8
+stderr:
+736. glr-regression.at:844:  ok
+./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+738. glr-regression.at:944:  ok
+
+741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ...
+
+./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y
 743. glr-regression.at:1038: testing No users destructors if stack 0 deleted: glr2.cc ...
 ./glr-regression.at:1038: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y
+./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS
+742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ...
+./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y
 ./glr-regression.at:1038: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS
+./glr-regression.at:1037: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:1036:  $PREPARSER ./glr-regr9
+./c++.at:860:  $PREPARSER ./input
 stderr:
-memory exhausted
-./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-741. glr-regression.at:1036:  ok
-
-744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ...
-./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y
-./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS
+./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./glr-regression.at:945:  $PREPARSER ./glr-regr8
@@ -270817,13 +270843,17 @@
 ./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 739. glr-regression.at:945:  ok
 
+744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ...
+./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y
+./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS
 stderr:
 stdout:
-./c++.at:860:  $PREPARSER ./input
+./glr-regression.at:1036:  $PREPARSER ./glr-regr9
 stderr:
-./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+memory exhausted
+./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+741. glr-regression.at:1036:  ok
+
 745. glr-regression.at:1103: testing Corrupted semantic options if user action cuts parse: glr.cc ...
 ./glr-regression.at:1103: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y
 ./glr-regression.at:1103: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS
@@ -270849,6 +270879,20 @@
 ./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS
 stderr:
 stdout:
+./c++.at:860:  $PREPARSER ./input
+stderr:
+./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./glr-regression.at:1102:  $PREPARSER ./glr-regr10
+stderr:
+./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+744. glr-regression.at:1102:  ok
+
+stderr:
+stdout:
 ./glr-regression.at:1037:  $PREPARSER ./glr-regr9
 stderr:
 memory exhausted
@@ -270858,13 +270902,6 @@
 747. glr-regression.at:1174: testing Undesirable destructors if user action cuts parse: glr.c ...
 ./glr-regression.at:1174: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr11.c glr-regr11.y
 ./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS
-stderr:
-stdout:
-./glr-regression.at:1102:  $PREPARSER ./glr-regr10
-stderr:
-./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-744. glr-regression.at:1102:  ok
-
 748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ...
 ./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y
 ./glr-regression.at:1175: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS
@@ -270893,64 +270930,64 @@
 ./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 747. glr-regression.at:1174:  ok
 
-750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ...
-./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y
-./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS
 stderr:
 stdout:
 ./glr-regression.at:1103:  $PREPARSER ./glr-regr10
 stderr:
 ./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 745. glr-regression.at:1103:  ok
+750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ...
+./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y
 
+./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS
 751. glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ...
 ./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y
 ./glr-regression.at:1311: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS
 stderr:
 stdout:
+./glr-regression.at:946:  $PREPARSER ./glr-regr8
 stderr:
-./c++.at:860:  $PREPARSER ./input
-stdout:
-./glr-regression.at:1175:  $PREPARSER ./glr-regr11
-stderr:
-stderr:
-./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-748. glr-regression.at:1175:  ok
+./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+740. glr-regression.at:946:  ok
 
 752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ...
 ./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y
 ./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:1310:  $PREPARSER ./glr-regr12
+./glr-regression.at:1175:  $PREPARSER ./glr-regr11
 stderr:
-./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-750. glr-regression.at:1310:  ok
+./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+748. glr-regression.at:1175:  ok
 
 753. glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ...
 ./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y
 ./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS
 stderr:
-stderr:
-stdout:
 stdout:
 ./glr-regression.at:1038:  $PREPARSER ./glr-regr9
-./glr-regression.at:946:  $PREPARSER ./glr-regr8
 stderr:
 memory exhausted
 ./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-stderr:
-./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 743. glr-regression.at:1038:  ok
 
-740. glr-regression.at:946:  ok
 754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ...
 ./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y
-
+stderr:
+stdout:
+./c++.at:860:  $PREPARSER ./input
+stderr:
+./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 ./glr-regression.at:1446: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS
+stderr:
+stdout:
+./glr-regression.at:1310:  $PREPARSER ./glr-regr12
+stderr:
+./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+750. glr-regression.at:1310:  ok
+
 755. glr-regression.at:1447: testing Incorrect lookahead during deterministic GLR: glr2.cc ...
 ./glr-regression.at:1447: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y
 ./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS
@@ -270963,73 +271000,66 @@
 
 756. glr-regression.at:1678: testing Incorrect lookahead during nondeterministic GLR: glr.c ...
 ./glr-regression.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr14.c glr-regr14.y
-./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS
-stderr:
-stdout:
-./c++.at:860:  $PREPARSER ./input
-stderr:
-./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./glr-regression.at:1445:  $PREPARSER ./glr-regr13
 stderr:
+./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS
 ./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 753. glr-regression.at:1445:  ok
 
-757. glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ...
-./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y
 stderr:
 stdout:
 ./glr-regression.at:1104:  $PREPARSER ./glr-regr10
 stderr:
 ./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+757. glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ...
+./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y
 746. glr-regression.at:1104:  ok
 
+./glr-regression.at:1679: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS
 758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ...
 ./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y
 ./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS
-./glr-regression.at:1679: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:1678:  $PREPARSER ./glr-regr14
+./c++.at:860:  $PREPARSER ./input
 stderr:
-./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-756. glr-regression.at:1678:  ok
+./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./glr-regression.at:1176:  $PREPARSER ./glr-regr11
+stderr:
+./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+749. glr-regression.at:1176:  ok
 
+759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ...
+./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y
+./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS
 stderr:
 stdout:
 ./glr-regression.at:1446:  $PREPARSER ./glr-regr13
 stderr:
 ./glr-regression.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 754. glr-regression.at:1446:  ok
-759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ...
-./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y
 
 760. glr-regression.at:1786: testing Leaked semantic values when reporting ambiguity: glr.cc ...
 ./glr-regression.at:1786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y
-./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS
 ./glr-regression.at:1786: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:1176:  $PREPARSER ./glr-regr11
+./glr-regression.at:1678:  $PREPARSER ./glr-regr14
 stderr:
-./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-749. glr-regression.at:1176:  ok
+./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+756. glr-regression.at:1678:  ok
 
 761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ...
 ./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y
 ./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS
 stderr:
 stdout:
-./c++.at:860:  $PREPARSER ./input
-stderr:
-./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-======== Testing with C++ standard flags: ''
-./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
 ./glr-regression.at:1785:  $PREPARSER ./glr-regr15
 stderr:
 Ambiguity detected.
@@ -271043,27 +271073,34 @@
 
 syntax is ambiguous
 ./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+stderr:
+stdout:
 759. glr-regression.at:1785:  ok
+./c++.at:860:  $PREPARSER ./input
+stderr:
+./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+======== Testing with C++ standard flags: ''
+./c++.at:860: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 
 762. glr-regression.at:1860: testing Leaked lookahead after nondeterministic parse syntax error: glr.c ...
 ./glr-regression.at:1860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr16.c glr-regr16.y
-./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS
 stderr:
+./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS
 stdout:
-./glr-regression.at:1679:  $PREPARSER ./glr-regr14
+./glr-regression.at:1312:  $PREPARSER ./glr-regr12
 stderr:
-./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-757. glr-regression.at:1679:  ok
+./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+752. glr-regression.at:1312:  ok
 
 763. glr-regression.at:1861: testing Leaked lookahead after nondeterministic parse syntax error: glr.cc ...
 ./glr-regression.at:1861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y
 ./glr-regression.at:1861: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS
 stderr:
 stdout:
-./glr-regression.at:1312:  $PREPARSER ./glr-regr12
+./glr-regression.at:1679:  $PREPARSER ./glr-regr14
 stderr:
-./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-752. glr-regression.at:1312:  ok
+./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+757. glr-regression.at:1679:  ok
 
 764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ...
 ./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y
@@ -271096,41 +271133,59 @@
 ./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 762. glr-regression.at:1860:  ok
 
-766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ...
-./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y
-./glr-regression.at:1965: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS
 stderr:
 stdout:
 ./glr-regression.at:1447:  $PREPARSER ./glr-regr13
 stderr:
 ./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 755. glr-regression.at:1447:  ok
+766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ...
+./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y
 
+./glr-regression.at:1965: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS
 767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ...
 ./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y
 ./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS
 stderr:
 stdout:
+./glr-regression.at:1861:  $PREPARSER ./glr-regr16
+stderr:
+syntax error
+./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+763. glr-regression.at:1861:  ok
+
+stderr:
+stdout:
 ./c++.at:860:  $PREPARSER ./input
 stderr:
 ./c++.at:860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 679. c++.at:854:  ok
 
 768. glr-regression.at:2035: testing Missed %merge type warnings when LHS type is declared later: glr.c ...
-./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y
-768. glr-regression.at:2035:  ok
-
 769. glr-regression.at:2036: testing Missed %merge type warnings when LHS type is declared later: glr.cc ...
 ./glr-regression.at:2036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y
+./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y
 769. glr-regression.at:2036:  ok
+768. glr-regression.at:2035:  ok
+
 
 770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ...
 ./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS;  bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y
 770. glr-regression.at:2037:  ok
-
 771. glr-regression.at:2149: testing Ambiguity reports: glr.c ...
 ./glr-regression.at:2149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
+
 ./glr-regression.at:2149: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
+772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ...
+./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./glr-regression.at:2150: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
+stderr:
+stdout:
+./glr-regression.at:1680:  $PREPARSER ./glr-regr14
+stderr:
+./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+758. glr-regression.at:1680:  ok
+
 stderr:
 stdout:
 ./glr-regression.at:1964:  $PREPARSER ./glr-regr17
@@ -271157,28 +271212,10 @@
 1.1-2.2: syntax is ambiguous
 ./glr-regression.at:1964: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 765. glr-regression.at:1964:  ok
-
-772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ...
-./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./glr-regression.at:2150: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./glr-regression.at:1861:  $PREPARSER ./glr-regr16
-stderr:
-syntax error
-./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-763. glr-regression.at:1861:  ok
-
 773. glr-regression.at:2151: testing Ambiguity reports: glr2.cc ...
 ./glr-regression.at:2151: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
-stderr:
-stdout:
-./glr-regression.at:1680:  $PREPARSER ./glr-regr14
-stderr:
-./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-758. glr-regression.at:1680:  ok
 
+./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 774. glr-regression.at:2229: testing Predicates: glr.c ...
 ./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.c input.y
 ./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS  $LDFLAGS -o input input.c $LIBS
@@ -271250,7 +271287,6 @@
 
 775. glr-regression.at:2230: testing Predicates: glr.cc ...
 ./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
-./glr-regression.at:2230: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./glr-regression.at:1965:  $PREPARSER ./glr-regr17
@@ -271276,13 +271312,16 @@
 
 1.1-2.2: syntax is ambiguous
 ./glr-regression.at:1965: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+./glr-regression.at:2230: $CXX $CPPFLAGS  $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 766. glr-regression.at:1965:  ok
 
+776. glr-regression.at:2231: testing Predicates: glr2.cc ...
+./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
+./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./glr-regression.at:1787:  $PREPARSER ./glr-regr15
 stderr:
-776. glr-regression.at:2231: testing Predicates: glr2.cc ...
 Ambiguity detected.
 Option 1,
   ambiguity -> <Rule 6, empty>
@@ -271294,9 +271333,7 @@
 
 syntax is ambiguous
 ./glr-regression.at:1787: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS;  bison --color=no -fno-caret  -o input.cc input.y
 761. glr-regression.at:1787:  ok
-./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS
 stderr:
 stdout:
 ./glr-regression.at:2229:  $PREPARSER ./input Nwin
@@ -271311,11 +271348,10 @@
 ./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./glr-regression.at:2229:  $PREPARSER ./input Nwio
 stderr:
-syntax error, unexpected 'o', expecting 'n'
-./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
-774. glr-regression.at:2229:  ok
 stderr:
 stdout:
+syntax error, unexpected 'o', expecting 'n'
+./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
 ./glr-regression.at:2150:  $PREPARSER ./input --debug
 stderr:
 Starting parse
@@ -271378,6 +271414,7 @@
 Cleanup: popping nterm b ()
 Cleanup: popping token 'a' ()
 ./glr-regression.at:2150: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr
+774. glr-regression.at:2229:  ok
 772. glr-regression.at:2150:  ok
 stderr:
 stdout:
@@ -271697,72 +271734,72 @@
 make[4]: Entering directory '/build/bison-3.8.2+dfsg'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/aarch64-linux-gnu'
- /usr/bin/install -c -m 644  lib/liby.a '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/aarch64-linux-gnu'
-  /usr/bin/install -c src/bison '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal'
- /usr/bin/install -c src/yacc '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin'
- /usr/bin/install -c -m 644 m4/bison-i18n.m4 '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal'
- ( cd '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/aarch64-linux-gnu' && ranlib liby.a )
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
+ /usr/bin/install -c -m 644  lib/liby.a '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/aarch64-linux-gnu'
+  /usr/bin/install -c src/bison '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
- /usr/bin/install -c -m 644 examples/c++/calc++/driver.cc examples/c++/calc++/driver.hh examples/c++/calc++/scanner.ll examples/c++/calc++/calc++.cc examples/c++/calc++/parser.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
+ /usr/bin/install -c src/yacc '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic'
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c'
+ /usr/bin/install -c -m 644 m4/bison-i18n.m4 '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal'
  /usr/bin/install -c -m 644 examples/c++/simple.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc'
- /usr/bin/install -c -m 644 examples/c/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c'
  /usr/bin/install -c -m 644 examples/c/bistromathic/parse.y examples/c/bistromathic/Makefile examples/c/bistromathic/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic'
- /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc'
+ /usr/bin/install -c -m 644 examples/c++/calc++/driver.cc examples/c++/calc++/driver.hh examples/c++/calc++/scanner.ll examples/c++/calc++/calc++.cc examples/c++/calc++/parser.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
+ ( cd '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/aarch64-linux-gnu' && ranlib liby.a )
+ /usr/bin/install -c -m 644 examples/c/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc'
- /usr/bin/install -c -m 644 examples/d/calc/calc.y examples/d/calc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc'
+ /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
- /usr/bin/install -c -m 644 examples/c++/calc++/README.md examples/c++/calc++/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
+ /usr/bin/install -c -m 644 examples/d/calc/calc.y examples/d/calc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d'
- /usr/bin/install -c -m 644 examples/c++/README.md examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison'
  /usr/bin/install -c -m 644 examples/d/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d'
+ /usr/bin/install -c -m 644 examples/c++/README.md examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++'
+ /usr/bin/install -c -m 644 examples/c++/calc++/README.md examples/c++/calc++/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples'
- /usr/bin/install -c -m 644 examples/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java'
+ /usr/bin/install -c -m 644 examples/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples'
  /usr/bin/install -c -m 644 AUTHORS COPYING NEWS README THANKS TODO '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc'
  /usr/bin/install -c -m 644 examples/java/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple'
  /usr/bin/install -c -m 644 examples/c/glr/c++-types.y examples/c/glr/Makefile examples/c/glr/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr'
  /usr/bin/install -c -m 644 examples/java/calc/Calc.y examples/java/calc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc'
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple'
- /usr/bin/install -c -m 644 examples/java/simple/Calc.y examples/java/simple/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar'
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
+ /usr/bin/install -c -m 644 examples/java/simple/Calc.y examples/java/simple/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple'
  /usr/bin/install -c -m 644 examples/c/lexcalc/parse.y examples/c/lexcalc/scan.l examples/c/lexcalc/Makefile examples/c/lexcalc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc'
- /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
  /usr/bin/install -c -m 644 data/m4sugar/foreach.m4 data/m4sugar/m4sugar.m4 '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison'
+ /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc'
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
  /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
+ /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple'
  /usr/bin/install -c -m 644 examples/c/pushcalc/calc.y examples/c/pushcalc/Makefile examples/c/pushcalc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc'
- /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc'
  /usr/bin/install -c -m 644 examples/c/rpcalc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
- /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple'
  /usr/bin/install -c -m 644 examples/d/simple/calc.y examples/d/simple/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons'
 /usr/bin/mkdir -p doc
+ /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt'
+LC_ALL=C tests/bison --version >doc/bison.help.tmp
  /usr/bin/install -c -m 644 data/skeletons/bison.m4 data/skeletons/c++-skel.m4 data/skeletons/c++.m4 data/skeletons/c-like.m4 data/skeletons/c-skel.m4 data/skeletons/c.m4 data/skeletons/glr.c data/skeletons/glr.cc data/skeletons/glr2.cc data/skeletons/java-skel.m4 data/skeletons/java.m4 data/skeletons/lalr1.cc data/skeletons/lalr1.java data/skeletons/location.cc data/skeletons/stack.hh data/skeletons/traceon.m4 data/skeletons/variant.hh data/skeletons/yacc.c data/skeletons/d-skel.m4 data/skeletons/d.m4 data/skeletons/lalr1.d '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons'
  /usr/bin/install -c -m 644 data/xslt/bison.xsl data/xslt/xml2dot.xsl data/xslt/xml2text.xsl data/xslt/xml2xhtml.xsl '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt'
-LC_ALL=C tests/bison --version >doc/bison.help.tmp
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
- /usr/bin/install -c -m 644 examples/c/mfcalc/calc.h examples/c/mfcalc/mfcalc.y '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
- /usr/bin/install -c -m 644 examples/c/rpcalc/rpcalc.y '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
 LC_ALL=C tests/bison --help | \
   sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \
       -e '/translation bugs/d'  >>doc/bison.help.tmp
+ /usr/bin/install -c -m 644 examples/c/rpcalc/rpcalc.y '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc'
+ /usr/bin/install -c -m 644 examples/c/mfcalc/calc.h examples/c/mfcalc/mfcalc.y '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc'
 ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/man/man1'
  /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/info'
@@ -271801,104 +271838,104 @@
 	Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison.mo
 	Normalized debian/bison/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo
 	Normalized debian/bison/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/sk/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/rw/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo
 	Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/eo/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/el/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo
-	Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/lv/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison.mo
-	Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo
 	Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo
-	Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo
 	Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo
 	Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo
+	Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo
+	Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo
+	Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo
    dh_compress
    dh_fixperms
    dh_missing
@@ -271910,9 +271947,9 @@
    dh_gencontrol
    dh_md5sums
    dh_builddeb
+dpkg-deb: building package 'bison' in '../bison_3.8.2+dfsg-1_arm64.deb'.
 dpkg-deb: building package 'bison-dbgsym' in '../bison-dbgsym_3.8.2+dfsg-1_arm64.deb'.
 dpkg-deb: building package 'libbison-dev' in '../libbison-dev_3.8.2+dfsg-1_arm64.deb'.
-dpkg-deb: building package 'bison' in '../bison_3.8.2+dfsg-1_arm64.deb'.
  dpkg-genbuildinfo --build=binary -O../bison_3.8.2+dfsg-1_arm64.buildinfo
  dpkg-genchanges --build=binary -O../bison_3.8.2+dfsg-1_arm64.changes
 dpkg-genchanges: info: binary-only upload (no source code included)
@@ -271920,12 +271957,14 @@
 dpkg-buildpackage: info: binary-only upload (no source included)
 dpkg-genchanges: info: including full source code in upload
 I: copying local configuration
+I: user script /srv/workspace/pbuilder/4296/tmp/hooks/B01_cleanup starting
+I: user script /srv/workspace/pbuilder/4296/tmp/hooks/B01_cleanup finished
 I: unmounting dev/ptmx filesystem
 I: unmounting dev/pts filesystem
 I: unmounting dev/shm filesystem
 I: unmounting proc filesystem
 I: unmounting sys filesystem
 I: cleaning the build env 
-I: removing directory /srv/workspace/pbuilder/9306 and its subdirectories
-I: Current time: Thu May 18 03:57:34 -12 2023
-I: pbuilder-time-stamp: 1684425454
+I: removing directory /srv/workspace/pbuilder/4296 and its subdirectories
+I: Current time: Thu Jun 20 12:48:35 +14 2024
+I: pbuilder-time-stamp: 1718837315